/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#include "config.h"

/* Defining NDEBUG disables assertions (which makes the code faster). */
#if !defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
# define NDEBUG
#endif

#include "qemu-common.h"
#include "exec/exec-all.h"           /* MAX_OPC_PARAM_IARGS */
#include "tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif
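
/* Presumably the 32-bit variant below needs ten parameters because each
   64-bit helper argument occupies a pair of 32-bit registers (five input
   arguments, two registers each). */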
#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong);
#endif
/* TCI can optionally use a global register variable for env. */
#if !defined(AREG0)
CPUArchState *env;
#endif

/* Targets which don't use GETPC also don't need tci_tb_ptr
   which makes them a little faster. */
#if defined(GETPC)
uintptr_t tci_tb_ptr;
#endif

static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];

static tcg_target_ulong tci_read_reg(TCGReg index)
{
    assert(index < ARRAY_SIZE(tci_reg));
    return tci_reg[index];
}
#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(TCGReg index)
{
    return (int8_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(TCGReg index)
{
    return (int16_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(TCGReg index)
{
    return (int32_t)tci_read_reg(index);
}
#endif

static uint8_t tci_read_reg8(TCGReg index)
{
    return (uint8_t)tci_read_reg(index);
}

static uint16_t tci_read_reg16(TCGReg index)
{
    return (uint16_t)tci_read_reg(index);
}

static uint32_t tci_read_reg32(TCGReg index)
{
    return (uint32_t)tci_read_reg(index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(TCGReg index)
{
    return tci_read_reg(index);
}
#endif

static void tci_write_reg(TCGReg index, tcg_target_ulong value)
{
    assert(index < ARRAY_SIZE(tci_reg));
    assert(index != TCG_AREG0);
    tci_reg[index] = value;
}

static void tci_write_reg8s(TCGReg index, int8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg16s(TCGReg index, int16_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 64
static void tci_write_reg32s(TCGReg index, int32_t value)
{
    tci_write_reg(index, value);
}
#endif

static void tci_write_reg8(TCGReg index, uint8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg16(TCGReg index, uint16_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg32(TCGReg index, uint32_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
                            uint64_t value)
{
    tci_write_reg(low_index, value);
    tci_write_reg(high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void tci_write_reg64(TCGReg index, uint64_t value)
{
    tci_write_reg(index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif
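
/* Bytecode operands are fetched sequentially: a register operand is a
   single byte holding the register index, while a constant operand is
   stored inline in host byte order, as the readers below show. */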
/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif
/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(tb_ptr);
    return tci_uint64(tci_read_r32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong tci_read_ulong(uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
#endif
    return taddr;
}
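
/* Register-or-constant ("ri") operands start with one register byte: the
   reserved index TCG_CONST marks an inline constant which follows in the
   bytecode; any other value selects a register. */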
/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(r);
    }
    return value;
}

/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(tb_ptr);
    return tci_uint64(tci_read_ri32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(r);
    }
    return value;
}
#endif

static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    assert(label != 0);
    return label;
}
static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}
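
/* Each bytecode instruction starts with the opcode in byte 0 and the total
   instruction size in byte 1; the size byte is only read when assertions
   are enabled, to check that every operand was consumed. */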
/* Interpret pseudo code in tb. */
tcg_target_ulong tcg_qemu_tb_exec(CPUArchState *cpustate, uint8_t *tb_ptr)
{
    tcg_target_ulong next_tb = 0;

    env = cpustate;
    tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
    assert(tb_ptr);

    for (;;) {
#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif
        TCGOpcode opc = tb_ptr[0];
#if !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
#ifndef CONFIG_SOFTMMU
        tcg_target_ulong host_addr;
#endif
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;

        switch (opc) {
        case INDEX_op_end:
        case INDEX_op_nop:
            break;
        case INDEX_op_nop1:
        case INDEX_op_nop2:
        case INDEX_op_nop3:
        case INDEX_op_nopn:
        case INDEX_op_discard:
            TODO();
            break;
        case INDEX_op_set_label:
            TODO();
            break;
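        /* For calls, the function pointer comes from the bytecode while
           the arguments are taken from a fixed set of registers (note
           that TCG_REG_R4 is skipped); the 64-bit result is written back
           to R0, with the high half in R1 on 32-bit hosts. */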
        case INDEX_op_call:
            t0 = tci_read_ri(&tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5),
                                          tci_read_reg(TCG_REG_R6),
                                          tci_read_reg(TCG_REG_R7),
                                          tci_read_reg(TCG_REG_R8),
                                          tci_read_reg(TCG_REG_R9),
                                          tci_read_reg(TCG_REG_R10));
            tci_write_reg(TCG_REG_R0, tmp64);
            tci_write_reg(TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5));
            tci_write_reg(TCG_REG_R0, tmp64);
#endif
            break;
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 ^ t2);
            break;
        /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 << t2);
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 >> t2);
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ((int32_t)t1 >> t2));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (t1 << t2) | (t1 >> (32 - t2)));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (t1 >> t2) | (t1 << (32 - t2)));
            break;
#endif
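        /* deposit replaces the 'len'-bit field at offset 'ofs' in t1 with
           the low bits of t2; below, tmp8 is the length, tmp16 the offset,
           and the mask ((1 << len) - 1) << ofs selects the field. */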
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_r32(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 += tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 -= tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(&tb_ptr);
            tmp64 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;

        /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_i32(&tb_ptr);
            *(uint64_t *)(t1 + t2) = t0;
            break;
        /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 ^ t2);
            break;

        /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 << t2);
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 >> t2);
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ((int64_t)t1 >> t2));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
        case INDEX_op_rotr_i64:
            TODO();
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_r64(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */
        /* QEMU specific operations. */

#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#else
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#endif
        case INDEX_op_exit_tb:
            next_tb = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            t0 = tci_read_i32(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
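        /* Guest memory accesses: with CONFIG_SOFTMMU they go through the
           helper_ld/st*_mmu functions, with the memory index read from the
           bytecode; in user mode the guest address (plus GUEST_BASE) is
           dereferenced directly, after asserting it fits a host pointer. */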
        case INDEX_op_qemu_ld8u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8(t0, tmp8);
            break;
        case INDEX_op_qemu_ld8s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp8 = helper_ldb_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp8 = *(uint8_t *)(host_addr + GUEST_BASE);
#endif
            tci_write_reg8s(t0, tmp8);
            break;
        case INDEX_op_qemu_ld16u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16(t0, tmp16);
            break;
        case INDEX_op_qemu_ld16s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp16 = helper_ldw_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp16 = tswap16(*(uint16_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg16s(t0, tmp16);
            break;
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_qemu_ld32u:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld32s:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32s(t0, tmp32);
            break;
#endif /* TCG_TARGET_REG_BITS == 64 */
        case INDEX_op_qemu_ld32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp32 = helper_ldl_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp32 = tswap32(*(uint32_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg32(t0, tmp32);
            break;
        case INDEX_op_qemu_ld64:
            t0 = *tb_ptr++;
#if TCG_TARGET_REG_BITS == 32
            t1 = *tb_ptr++;
#endif
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            tmp64 = helper_ldq_mmu(env, taddr, tci_read_i(&tb_ptr));
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            tmp64 = tswap64(*(uint64_t *)(host_addr + GUEST_BASE));
#endif
            tci_write_reg(t0, tmp64);
#if TCG_TARGET_REG_BITS == 32
            tci_write_reg(t1, tmp64 >> 32);
#endif
            break;
        case INDEX_op_qemu_st8:
            t0 = tci_read_r8(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stb_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint8_t *)(host_addr + GUEST_BASE) = t0;
#endif
            break;
        case INDEX_op_qemu_st16:
            t0 = tci_read_r16(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stw_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint16_t *)(host_addr + GUEST_BASE) = tswap16(t0);
#endif
            break;
        case INDEX_op_qemu_st32:
            t0 = tci_read_r32(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stl_mmu(env, taddr, t0, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint32_t *)(host_addr + GUEST_BASE) = tswap32(t0);
#endif
            break;
        case INDEX_op_qemu_st64:
            tmp64 = tci_read_r64(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
#ifdef CONFIG_SOFTMMU
            t2 = tci_read_i(&tb_ptr);
            helper_stq_mmu(env, taddr, tmp64, t2);
#else
            host_addr = (tcg_target_ulong)taddr;
            assert(taddr == host_addr);
            *(uint64_t *)(host_addr + GUEST_BASE) = tswap64(tmp64);
#endif
            break;
        default:
            TODO();
            break;
        }
        assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return next_tb;
}