/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "config.h"

/* Defining NDEBUG disables assertions (which makes the code faster). */
#if !defined(CONFIG_TCG_DEBUG) && !defined(NDEBUG)
# define NDEBUG
#endif

#include "qemu-common.h"
#include "dyngen-exec.h"        /* env */
#include "exec-all.h"           /* MAX_OPC_PARAM_IARGS */
#include "tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 4
# error Fix needed, number of supported input arguments changed!
#endif
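
/* Helpers are called through a single generic function pointer.  On 32 bit
   hosts each of the MAX_OPC_PARAM_IARGS input arguments may occupy a pair
   of registers, which is why twice as many parameters are declared. */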
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#endif

/* TCI can optionally use a global register variable for env. */
#if !defined(AREG0)
CPUState *env;
#endif

/* Targets which don't use GETPC also don't need tci_tb_ptr
   which makes them a little faster. */
#if defined(GETPC)
void *tci_tb_ptr;
#endif

static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];

static tcg_target_ulong tci_read_reg(TCGReg index)
{
    assert(index < ARRAY_SIZE(tci_reg));
    return tci_reg[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(TCGReg index)
{
    return (int8_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(TCGReg index)
{
    return (int16_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(TCGReg index)
{
    return (int32_t)tci_read_reg(index);
}
#endif

static uint8_t tci_read_reg8(TCGReg index)
{
    return (uint8_t)tci_read_reg(index);
}

static uint16_t tci_read_reg16(TCGReg index)
{
    return (uint16_t)tci_read_reg(index);
}

static uint32_t tci_read_reg32(TCGReg index)
{
    return (uint32_t)tci_read_reg(index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(TCGReg index)
{
    return tci_read_reg(index);
}
#endif

static void tci_write_reg(TCGReg index, tcg_target_ulong value)
{
    assert(index < ARRAY_SIZE(tci_reg));
    assert(index != TCG_AREG0);
    tci_reg[index] = value;
}

static void tci_write_reg8s(TCGReg index, int8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg16s(TCGReg index, int16_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 64
static void tci_write_reg32s(TCGReg index, int32_t value)
{
    tci_write_reg(index, value);
}
#endif

static void tci_write_reg8(TCGReg index, uint8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg16(TCGReg index, uint16_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg32(TCGReg index, uint32_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 32
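/* On 32 bit hosts, a 64 bit value is held in a pair of 32 bit registers:
   the low half in low_index and the high half in high_index. */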
static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
                            uint64_t value)
{
    tci_write_reg(low_index, value);
    tci_write_reg(high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void tci_write_reg64(TCGReg index, uint64_t value)
{
    tci_write_reg(index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif
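
/* Each bytecode instruction consists of a one byte opcode, a one byte
   total instruction size, and its operands.  The tci_read_* helpers
   below decode one operand each and advance *tb_ptr past it. */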
/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif

/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(tb_ptr);
    return tci_uint64(tci_read_r32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong tci_read_ulong(uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
#endif
    return taddr;
}
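
/* In the bytecode, the reserved register number TCG_CONST marks an
   operand which is an inline constant rather than a register index. */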
/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(tb_ptr);
    return tci_uint64(tci_read_ri32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(r);
    }
    return value;
}
#endif
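
/* Branch targets are emitted as absolute code addresses, presumably
   patched in by the code generator once the label is defined; the
   assertion below catches a label which was never patched. */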
static target_ulong tci_read_label(uint8_t **tb_ptr)
{
    target_ulong label = tci_read_i(tb_ptr);
    assert(label != 0);
    return label;
}

static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}
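
/* The dispatch loop below decodes and executes one bytecode instruction
   per iteration.  In debug builds, the size entry of each instruction is
   checked against the number of bytecode bytes actually consumed. */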
/* Interpret pseudo code in tb. */
unsigned long tcg_qemu_tb_exec(CPUState *cpustate, uint8_t *tb_ptr)
{
    unsigned long next_tb = 0;

    env = cpustate;
    tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
    assert(tb_ptr);

    for (;;) {
#if defined(GETPC)
        tci_tb_ptr = tb_ptr;
#endif
        TCGOpcode opc = tb_ptr[0];
#if !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
#ifndef CONFIG_SOFTMMU
        tcg_target_ulong host_addr;
#endif
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_end:
        case INDEX_op_nop:
            break;
        case INDEX_op_nop1:
        case INDEX_op_nop2:
        case INDEX_op_nop3:
        case INDEX_op_nopn:
        case INDEX_op_discard:
            TODO();
            break;
        case INDEX_op_set_label:
            TODO();
            break;
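
            /* Helper calls: the call target was emitted as a register or
               constant; arguments are taken from a fixed set of registers
               (TCG_REG_R4 is not used for argument passing) and the result
               is returned in TCG_REG_R0, plus TCG_REG_R1 for the high half
               on 32 bit hosts. */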
        case INDEX_op_call:
            t0 = tci_read_ri(&tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5),
                                          tci_read_reg(TCG_REG_R6),
                                          tci_read_reg(TCG_REG_R7),
                                          tci_read_reg(TCG_REG_R8));
            tci_write_reg(TCG_REG_R0, tmp64);
            tci_write_reg(TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3));
            tci_write_reg(TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_jmp:
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 << t2);
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 >> t2);
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ((int32_t)t1 >> t2));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            /* Mask the rotate count so that a count of zero cannot
               produce an undefined shift by 32 bits. */
            tci_write_reg32(t0, (t1 << (t2 & 31)) | (t1 >> (-t2 & 31)));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (t1 >> (t2 & 31)) | (t1 << (-t2 & 31)));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
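            /* Double word arithmetic: 64 bit operands are assembled from
               and written back to pairs of 32 bit registers. */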
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 += tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 -= tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(&tb_ptr);
            tmp64 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 << t2);
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 >> t2);
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ((int64_t)t1 >> t2));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
        case INDEX_op_rotr_i64:
            TODO();
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#else
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#endif
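            /* exit_tb returns an opaque value to the caller of
               tcg_qemu_tb_exec; goto_tb continues interpretation at a
               relative offset within the bytecode. */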
        case INDEX_op_exit_tb:
            next_tb = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            t0 = tci_read_i32(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
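
            /* Guest memory loads: with CONFIG_SOFTMMU the access goes
               through the __ld*_mmu helpers, whose extra bytecode operand
               is the memory index.  Otherwise the guest address is used as
               a host address offset by GUEST_BASE, and tswap* corrects for
               any target/host endianness difference. */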
        case INDEX_op_qemu_ld8u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = __ldb_mmu(taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8(t0, tmp8);
            break;
        case INDEX_op_qemu_ld8s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = __ldb_mmu(taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8s(t0, tmp8);
            break;
        case INDEX_op_qemu_ld16u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = __ldw_mmu(taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16(t0, tmp16);
            break;
        case INDEX_op_qemu_ld16s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = __ldw_mmu(taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16s(t0, tmp16);
            break;
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_qemu_ld32u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = __ldl_mmu(taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld32s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = __ldl_mmu(taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32s(t0, tmp32);
            break;
#endif /* TCG_TARGET_REG_BITS == 64 */
        case INDEX_op_qemu_ld32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = __ldl_mmu(taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld64:
            t0 = *tb_ptr++;
#if TCG_TARGET_REG_BITS == 32
            t1 = *tb_ptr++;
#endif
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp64 = __ldq_mmu(taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp64 = tswap64(*(uint64_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg(t0, tmp64);
#if TCG_TARGET_REG_BITS == 32
            tci_write_reg(t1, tmp64 >> 32);
#endif
            break;
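
            /* Guest memory stores: the addressing scheme mirrors the
               loads above. */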
        case INDEX_op_qemu_st8:
            t0 = tci_read_r8(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            __stb_mmu(taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint8_t *)(host_addr + GUEST_BASE) = t0;
#endif
            break;
        case INDEX_op_qemu_st16:
            t0 = tci_read_r16(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            __stw_mmu(taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint16_t *)(host_addr + GUEST_BASE) = tswap16(t0);
#endif
            break;
        case INDEX_op_qemu_st32:
            t0 = tci_read_r32(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            __stl_mmu(taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint32_t *)(host_addr + GUEST_BASE) = tswap32(t0);
#endif
            break;
        case INDEX_op_qemu_st64:
            tmp64 = tci_read_r64(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            __stq_mmu(taddr, tmp64, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint64_t *)(host_addr + GUEST_BASE) = tswap64(tmp64);
#endif
            break;
        default:
            TODO();
            break;
        }
        assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return next_tb;
}