tci.c

/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "config.h"

/* Defining NDEBUG disables assertions (which makes the code faster). */
#if !defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
# define NDEBUG
#endif

#include "qemu-common.h"
#include "exec-all.h"           /* MAX_OPC_PARAM_IARGS */
#include "tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif

#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong);
#endif
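
/* MAX_OPC_PARAM_IARGS is 5, but any helper argument may be a 64-bit
   value.  On a 32-bit host such an argument occupies two
   tcg_target_ulong words, hence the ten-parameter variant above. */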

/* TCI can optionally use a global register variable for env. */
#if !defined(AREG0)
CPUArchState *env;
#endif

/* Targets which don't use GETPC also don't need tci_tb_ptr
   which makes them a little faster. */
#if defined(GETPC)
uintptr_t tci_tb_ptr;
#endif

static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];

static tcg_target_ulong tci_read_reg(TCGReg index)
{
    assert(index < ARRAY_SIZE(tci_reg));
    return tci_reg[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(TCGReg index)
{
    return (int8_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(TCGReg index)
{
    return (int16_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(TCGReg index)
{
    return (int32_t)tci_read_reg(index);
}
#endif

static uint8_t tci_read_reg8(TCGReg index)
{
    return (uint8_t)tci_read_reg(index);
}

static uint16_t tci_read_reg16(TCGReg index)
{
    return (uint16_t)tci_read_reg(index);
}

static uint32_t tci_read_reg32(TCGReg index)
{
    return (uint32_t)tci_read_reg(index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(TCGReg index)
{
    return tci_read_reg(index);
}
#endif

static void tci_write_reg(TCGReg index, tcg_target_ulong value)
{
    assert(index < ARRAY_SIZE(tci_reg));
    assert(index != TCG_AREG0);
    tci_reg[index] = value;
}

static void tci_write_reg8s(TCGReg index, int8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg16s(TCGReg index, int16_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 64
static void tci_write_reg32s(TCGReg index, int32_t value)
{
    tci_write_reg(index, value);
}
#endif

static void tci_write_reg8(TCGReg index, uint8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg16(TCGReg index, uint16_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg32(TCGReg index, uint32_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
                            uint64_t value)
{
    tci_write_reg(low_index, value);
    tci_write_reg(high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void tci_write_reg64(TCGReg index, uint64_t value)
{
    tci_write_reg(index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif
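
/* Example: tci_uint64(0x00000001, 0x00000002) == 0x0000000100000002. */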

/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(tb_ptr);
    return tci_uint64(tci_read_r32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong tci_read_ulong(uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
#endif
    return taddr;
}
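
/* An "ri" operand is encoded as a single register-index byte; the
   reserved index TCG_CONST signals that an immediate of the given
   width follows in the bytecode instead of naming a register. */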
  252. /* Read indexed register or constant (native size) from bytecode. */
  253. static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
  254. {
  255. tcg_target_ulong value;
  256. TCGReg r = **tb_ptr;
  257. *tb_ptr += 1;
  258. if (r == TCG_CONST) {
  259. value = tci_read_i(tb_ptr);
  260. } else {
  261. value = tci_read_reg(r);
  262. }
  263. return value;
  264. }
  265. /* Read indexed register or constant (32 bit) from bytecode. */
  266. static uint32_t tci_read_ri32(uint8_t **tb_ptr)
  267. {
  268. uint32_t value;
  269. TCGReg r = **tb_ptr;
  270. *tb_ptr += 1;
  271. if (r == TCG_CONST) {
  272. value = tci_read_i32(tb_ptr);
  273. } else {
  274. value = tci_read_reg32(r);
  275. }
  276. return value;
  277. }
  278. #if TCG_TARGET_REG_BITS == 32
  279. /* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
  280. static uint64_t tci_read_ri64(uint8_t **tb_ptr)
  281. {
  282. uint32_t low = tci_read_ri32(tb_ptr);
  283. return tci_uint64(tci_read_ri32(tb_ptr), low);
  284. }
  285. #elif TCG_TARGET_REG_BITS == 64
  286. /* Read indexed register or constant (64 bit) from bytecode. */
  287. static uint64_t tci_read_ri64(uint8_t **tb_ptr)
  288. {
  289. uint64_t value;
  290. TCGReg r = **tb_ptr;
  291. *tb_ptr += 1;
  292. if (r == TCG_CONST) {
  293. value = tci_read_i64(tb_ptr);
  294. } else {
  295. value = tci_read_reg64(r);
  296. }
  297. return value;
  298. }
  299. #endif
  300. static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
  301. {
  302. tcg_target_ulong label = tci_read_i(tb_ptr);
  303. assert(label != 0);
  304. return label;
  305. }
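
/* A label operand is the absolute bytecode address of the branch
   target, filled in by the TCG relocation machinery once the target is
   known; 0 marks an unresolved label, which the assertion rejects. */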

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}
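
/* Example: tci_compare32(1, -1, TCG_COND_LT) is false (signed compare),
   while tci_compare32(1, -1, TCG_COND_LTU) is true, because -1 is
   0xffffffff when treated as unsigned. */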

/* Interpret pseudo code in tb. */
tcg_target_ulong tcg_qemu_tb_exec(CPUArchState *cpustate, uint8_t *tb_ptr)
{
    tcg_target_ulong next_tb = 0;

    env = cpustate;
    tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
    assert(tb_ptr);
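
    /* Each bytecode instruction starts with a one-byte opcode followed
       by a one-byte total length; operands (register indexes,
       immediates, labels) follow.  The length byte lets debug builds
       assert below that exactly op_size bytes were consumed. */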
    for (;;) {
#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif
        TCGOpcode opc = tb_ptr[0];
#if !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
#ifndef CONFIG_SOFTMMU
        tcg_target_ulong host_addr;
#endif
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_end:
        case INDEX_op_nop:
            break;
        case INDEX_op_nop1:
        case INDEX_op_nop2:
        case INDEX_op_nop3:
        case INDEX_op_nopn:
        case INDEX_op_discard:
            TODO();
            break;
        case INDEX_op_set_label:
            TODO();
            break;
        case INDEX_op_call:
            t0 = tci_read_ri(&tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5),
                                          tci_read_reg(TCG_REG_R6),
                                          tci_read_reg(TCG_REG_R7),
                                          tci_read_reg(TCG_REG_R8),
                                          tci_read_reg(TCG_REG_R9),
                                          tci_read_reg(TCG_REG_R10));
            tci_write_reg(TCG_REG_R0, tmp64);
            tci_write_reg(TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5));
            tci_write_reg(TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_jmp:
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        /* Load/store operations (32 bit). */
        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        /* Arithmetic operations (32 bit). */
        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 ^ t2);
            break;
        /* Shift/rotate operations (32 bit). */
        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 << t2);
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 >> t2);
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ((int32_t)t1 >> t2));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (t1 << t2) | (t1 >> (32 - t2)));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (t1 >> t2) | (t1 << (32 - t2)));
            break;
#endif
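        /* Note: the rotate expressions above are only well defined for
           rotation counts 1..31; a count of 0 would shift by 32, which
           is undefined behaviour in C. */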
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
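        /* On 32-bit hosts a 64-bit TCG value occupies a register pair,
           so the following ops take two result register indexes and use
           tci_read_r64()/tci_write_reg64() to combine and split the
           low and high halves. */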
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 += tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 -= tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(&tb_ptr);
            tmp64 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
        /* Load/store operations (64 bit). */
        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint64_t *)(t1 + t2) = t0;
            break;
        /* Arithmetic operations (64 bit). */
        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 ^ t2);
            break;
        /* Shift/rotate operations (64 bit). */
        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 << t2);
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 >> t2);
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ((int64_t)t1 >> t2));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
        case INDEX_op_rotr_i64:
            TODO();
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */
        /* QEMU specific operations. */
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#else
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#endif
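        /* Guest load/store: with CONFIG_SOFTMMU the access goes through
           the softmmu helpers (helper_ldb_mmu etc.), passing the
           mmu_idx immediate read from the bytecode; in user mode the
           guest address is used directly as a host address, offset by
           GUEST_BASE, with tswap applied for cross-endian targets. */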
        case INDEX_op_exit_tb:
            next_tb = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            t0 = tci_read_i32(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
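        /* The 32-bit value read above is a relative offset that the
           translator patches for TB chaining; presumably it starts out
           as zero, so an unchained goto_tb simply falls through to the
           next instruction. */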
        case INDEX_op_qemu_ld8u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8(t0, tmp8);
            break;
        case INDEX_op_qemu_ld8s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8s(t0, tmp8);
            break;
        case INDEX_op_qemu_ld16u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16(t0, tmp16);
            break;
        case INDEX_op_qemu_ld16s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16s(t0, tmp16);
            break;
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_qemu_ld32u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld32s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32s(t0, tmp32);
            break;
#endif /* TCG_TARGET_REG_BITS == 64 */
        case INDEX_op_qemu_ld32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld64:
            t0 = *tb_ptr++;
#if TCG_TARGET_REG_BITS == 32
            t1 = *tb_ptr++;
#endif
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp64 = helper_ldq_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp64 = tswap64(*(uint64_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg(t0, tmp64);
#if TCG_TARGET_REG_BITS == 32
            tci_write_reg(t1, tmp64 >> 32);
#endif
            break;
        case INDEX_op_qemu_st8:
            t0 = tci_read_r8(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stb_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint8_t *)(host_addr + GUEST_BASE) = t0;
#endif
            break;
        case INDEX_op_qemu_st16:
            t0 = tci_read_r16(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stw_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint16_t *)(host_addr + GUEST_BASE) = tswap16(t0);
#endif
            break;
        case INDEX_op_qemu_st32:
            t0 = tci_read_r32(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stl_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint32_t *)(host_addr + GUEST_BASE) = tswap32(t0);
#endif
            break;
        case INDEX_op_qemu_st64:
            tmp64 = tci_read_r64(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stq_mmu(env, taddr, tmp64, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint64_t *)(host_addr + GUEST_BASE) = tswap64(tmp64);
#endif
            break;
        default:
            TODO();
            break;
        }
        assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return next_tb;
}