/*
 * Tiny Code Interpreter for QEMU
 *
 * Copyright (c) 2009, 2011 Stefan Weil
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include "config.h"

/* Defining NDEBUG disables assertions (which makes the code faster). */
#if !defined(CONFIG_DEBUG_TCG) && !defined(NDEBUG)
# define NDEBUG
#endif

#include "qemu-common.h"
#include "exec/exec-all.h"      /* MAX_OPC_PARAM_IARGS */
#include "exec/cpu_ldst.h"
#include "tcg-op.h"

/* Marker for missing code. */
#define TODO() \
    do { \
        fprintf(stderr, "TODO %s:%u: %s()\n", \
                __FILE__, __LINE__, __func__); \
        tcg_abort(); \
    } while (0)

#if MAX_OPC_PARAM_IARGS != 5
# error Fix needed, number of supported input arguments changed!
#endif

#if TCG_TARGET_REG_BITS == 32
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong);
#else
typedef uint64_t (*helper_function)(tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong, tcg_target_ulong,
                                    tcg_target_ulong);
#endif
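
/* With MAX_OPC_PARAM_IARGS fixed at 5, a helper receives at most five
 * tcg_target_ulong inputs.  On a 32-bit host each (potentially 64-bit)
 * argument occupies a pair of words, hence the ten-parameter variant
 * above; the interpreter simply forwards the raw register words to the
 * helper. */
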
/* Targets which don't use GETPC also don't need tci_tb_ptr
   which makes them a little faster. */
#if defined(GETPC)
uintptr_t tci_tb_ptr;
#endif

static tcg_target_ulong tci_reg[TCG_TARGET_NB_REGS];

static tcg_target_ulong tci_read_reg(TCGReg index)
{
    assert(index < ARRAY_SIZE(tci_reg));
    return tci_reg[index];
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
static int8_t tci_read_reg8s(TCGReg index)
{
    return (int8_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
static int16_t tci_read_reg16s(TCGReg index)
{
    return (int16_t)tci_read_reg(index);
}
#endif

#if TCG_TARGET_REG_BITS == 64
static int32_t tci_read_reg32s(TCGReg index)
{
    return (int32_t)tci_read_reg(index);
}
#endif

static uint8_t tci_read_reg8(TCGReg index)
{
    return (uint8_t)tci_read_reg(index);
}

static uint16_t tci_read_reg16(TCGReg index)
{
    return (uint16_t)tci_read_reg(index);
}

static uint32_t tci_read_reg32(TCGReg index)
{
    return (uint32_t)tci_read_reg(index);
}

#if TCG_TARGET_REG_BITS == 64
static uint64_t tci_read_reg64(TCGReg index)
{
    return tci_read_reg(index);
}
#endif

static void tci_write_reg(TCGReg index, tcg_target_ulong value)
{
    assert(index < ARRAY_SIZE(tci_reg));
    assert(index != TCG_AREG0);
    assert(index != TCG_REG_CALL_STACK);
    tci_reg[index] = value;
}

#if TCG_TARGET_REG_BITS == 64
static void tci_write_reg32s(TCGReg index, int32_t value)
{
    tci_write_reg(index, value);
}
#endif

static void tci_write_reg8(TCGReg index, uint8_t value)
{
    tci_write_reg(index, value);
}

static void tci_write_reg32(TCGReg index, uint32_t value)
{
    tci_write_reg(index, value);
}

#if TCG_TARGET_REG_BITS == 32
static void tci_write_reg64(uint32_t high_index, uint32_t low_index,
                            uint64_t value)
{
    tci_write_reg(low_index, value);
    tci_write_reg(high_index, value >> 32);
}
#elif TCG_TARGET_REG_BITS == 64
static void tci_write_reg64(TCGReg index, uint64_t value)
{
    tci_write_reg(index, value);
}
#endif

#if TCG_TARGET_REG_BITS == 32
/* Create a 64 bit value from two 32 bit values. */
static uint64_t tci_uint64(uint32_t high, uint32_t low)
{
    return ((uint64_t)high << 32) + low;
}
#endif
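
/* On a 32-bit host a 64-bit TCG value occupies a low/high register
 * pair.  tci_write_reg64() and tci_uint64() above are the only places
 * that split and join such pairs, so the low-word-first convention is
 * established here and relied upon by the 64-bit cases below. */
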
/* Read constant (native size) from bytecode. */
static tcg_target_ulong tci_read_i(uint8_t **tb_ptr)
{
    tcg_target_ulong value = *(tcg_target_ulong *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read unsigned constant (32 bit) from bytecode. */
static uint32_t tci_read_i32(uint8_t **tb_ptr)
{
    uint32_t value = *(uint32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

/* Read signed constant (32 bit) from bytecode. */
static int32_t tci_read_s32(uint8_t **tb_ptr)
{
    int32_t value = *(int32_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}

#if TCG_TARGET_REG_BITS == 64
/* Read constant (64 bit) from bytecode. */
static uint64_t tci_read_i64(uint8_t **tb_ptr)
{
    uint64_t value = *(uint64_t *)(*tb_ptr);
    *tb_ptr += sizeof(value);
    return value;
}
#endif
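
/* Each tci_read_* helper decodes one operand in host byte order and
 * advances *tb_ptr past it, so opcode handlers simply call the readers
 * left to right in operand order.  The plain pointer casts assume the
 * host tolerates whatever alignment the bytecode generator emits. */
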
/* Read indexed register (native size) from bytecode. */
static tcg_target_ulong tci_read_r(uint8_t **tb_ptr)
{
    tcg_target_ulong value = tci_read_reg(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (8 bit) from bytecode. */
static uint8_t tci_read_r8(uint8_t **tb_ptr)
{
    uint8_t value = tci_read_reg8(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
/* Read indexed register (8 bit signed) from bytecode. */
static int8_t tci_read_r8s(uint8_t **tb_ptr)
{
    int8_t value = tci_read_reg8s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (16 bit) from bytecode. */
static uint16_t tci_read_r16(uint8_t **tb_ptr)
{
    uint16_t value = tci_read_reg16(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64
/* Read indexed register (16 bit signed) from bytecode. */
static int16_t tci_read_r16s(uint8_t **tb_ptr)
{
    int16_t value = tci_read_reg16s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register (32 bit) from bytecode. */
static uint32_t tci_read_r32(uint8_t **tb_ptr)
{
    uint32_t value = tci_read_reg32(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers (2 * 32 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_r32(tb_ptr);
    return tci_uint64(tci_read_r32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register (32 bit signed) from bytecode. */
static int32_t tci_read_r32s(uint8_t **tb_ptr)
{
    int32_t value = tci_read_reg32s(**tb_ptr);
    *tb_ptr += 1;
    return value;
}

/* Read indexed register (64 bit) from bytecode. */
static uint64_t tci_read_r64(uint8_t **tb_ptr)
{
    uint64_t value = tci_read_reg64(**tb_ptr);
    *tb_ptr += 1;
    return value;
}
#endif

/* Read indexed register(s) with target address from bytecode. */
static target_ulong tci_read_ulong(uint8_t **tb_ptr)
{
    target_ulong taddr = tci_read_r(tb_ptr);
#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
    taddr += (uint64_t)tci_read_r(tb_ptr) << 32;
#endif
    return taddr;
}
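
/* When the guest address is wider than a host register (a 64-bit guest
 * on a 32-bit host), tci_read_ulong() consumes a second register byte
 * carrying the high half of the address. */
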
/* Read indexed register or constant (native size) from bytecode. */
static tcg_target_ulong tci_read_ri(uint8_t **tb_ptr)
{
    tcg_target_ulong value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i(tb_ptr);
    } else {
        value = tci_read_reg(r);
    }
    return value;
}
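
/* An "ri" operand is either a single register byte or the reserved
 * marker TCG_CONST followed by an inline immediate.  Illustrative
 * layout (a sketch, not normative; see the encoder in
 * tcg/tci/tcg-target.c):
 *
 *     register form:  [reg]
 *     constant form:  [TCG_CONST] [immediate, native size]
 */
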
/* Read indexed register or constant (32 bit) from bytecode. */
static uint32_t tci_read_ri32(uint8_t **tb_ptr)
{
    uint32_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i32(tb_ptr);
    } else {
        value = tci_read_reg32(r);
    }
    return value;
}

#if TCG_TARGET_REG_BITS == 32
/* Read two indexed registers or constants (2 * 32 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint32_t low = tci_read_ri32(tb_ptr);
    return tci_uint64(tci_read_ri32(tb_ptr), low);
}
#elif TCG_TARGET_REG_BITS == 64
/* Read indexed register or constant (64 bit) from bytecode. */
static uint64_t tci_read_ri64(uint8_t **tb_ptr)
{
    uint64_t value;
    TCGReg r = **tb_ptr;
    *tb_ptr += 1;
    if (r == TCG_CONST) {
        value = tci_read_i64(tb_ptr);
    } else {
        value = tci_read_reg64(r);
    }
    return value;
}
#endif

static tcg_target_ulong tci_read_label(uint8_t **tb_ptr)
{
    tcg_target_ulong label = tci_read_i(tb_ptr);
    assert(label != 0);
    return label;
}
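
/* A label operand is an absolute host address inside the translated
 * code; the encoder patches it in once the branch target is known, so
 * zero here would mean a branch to an unresolved label, which the
 * assertion catches. */
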
static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
{
    bool result = false;
    int32_t i0 = u0;
    int32_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}

static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
{
    bool result = false;
    int64_t i0 = u0;
    int64_t i1 = u1;
    switch (condition) {
    case TCG_COND_EQ:
        result = (u0 == u1);
        break;
    case TCG_COND_NE:
        result = (u0 != u1);
        break;
    case TCG_COND_LT:
        result = (i0 < i1);
        break;
    case TCG_COND_GE:
        result = (i0 >= i1);
        break;
    case TCG_COND_LE:
        result = (i0 <= i1);
        break;
    case TCG_COND_GT:
        result = (i0 > i1);
        break;
    case TCG_COND_LTU:
        result = (u0 < u1);
        break;
    case TCG_COND_GEU:
        result = (u0 >= u1);
        break;
    case TCG_COND_LEU:
        result = (u0 <= u1);
        break;
    case TCG_COND_GTU:
        result = (u0 > u1);
        break;
    default:
        TODO();
    }
    return result;
}
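
/* Both helpers evaluate the signed conditions (LT/GE/LE/GT) on the
 * operand bits reinterpreted as two's complement via i0/i1, and the
 * unsigned conditions (LTU/GEU/LEU/GTU) on the raw values.  For
 * example, with u0 = 0xffffffff and u1 = 1, LTU is false but LT is
 * true, since (int32_t)u0 == -1. */
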
#ifdef CONFIG_SOFTMMU
# define mmuidx          tci_read_i(&tb_ptr)
# define qemu_ld_ub \
    helper_ret_ldub_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
# define qemu_ld_leuw \
    helper_le_lduw_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
# define qemu_ld_leul \
    helper_le_ldul_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
# define qemu_ld_leq \
    helper_le_ldq_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
# define qemu_ld_beuw \
    helper_be_lduw_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
# define qemu_ld_beul \
    helper_be_ldul_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
# define qemu_ld_beq \
    helper_be_ldq_mmu(env, taddr, mmuidx, (uintptr_t)tb_ptr)
# define qemu_st_b(X) \
    helper_ret_stb_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
# define qemu_st_lew(X) \
    helper_le_stw_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
# define qemu_st_lel(X) \
    helper_le_stl_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
# define qemu_st_leq(X) \
    helper_le_stq_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
# define qemu_st_bew(X) \
    helper_be_stw_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
# define qemu_st_bel(X) \
    helper_be_stl_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
# define qemu_st_beq(X) \
    helper_be_stq_mmu(env, taddr, X, mmuidx, (uintptr_t)tb_ptr)
#else
# define qemu_ld_ub      ldub_p(g2h(taddr))
# define qemu_ld_leuw    lduw_le_p(g2h(taddr))
# define qemu_ld_leul    (uint32_t)ldl_le_p(g2h(taddr))
# define qemu_ld_leq     ldq_le_p(g2h(taddr))
# define qemu_ld_beuw    lduw_be_p(g2h(taddr))
# define qemu_ld_beul    (uint32_t)ldl_be_p(g2h(taddr))
# define qemu_ld_beq     ldq_be_p(g2h(taddr))
# define qemu_st_b(X)    stb_p(g2h(taddr), X)
# define qemu_st_lew(X)  stw_le_p(g2h(taddr), X)
# define qemu_st_lel(X)  stl_le_p(g2h(taddr), X)
# define qemu_st_leq(X)  stq_le_p(g2h(taddr), X)
# define qemu_st_bew(X)  stw_be_p(g2h(taddr), X)
# define qemu_st_bel(X)  stl_be_p(g2h(taddr), X)
# define qemu_st_beq(X)  stq_be_p(g2h(taddr), X)
#endif
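
/* In softmmu (system emulation) builds guest accesses go through the
 * TLB helpers, which take the MMU index read from the bytecode plus
 * the current bytecode address as a return-PC for fault handling.  In
 * user-mode builds a guest address maps directly to a host pointer via
 * g2h(), so every access becomes a plain host load or store. */
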
/* Interpret pseudo code in tb. */
uintptr_t tcg_qemu_tb_exec(CPUArchState *env, uint8_t *tb_ptr)
{
    long tcg_temps[CPU_TEMP_BUF_NLONGS];
    uintptr_t sp_value = (uintptr_t)(tcg_temps + CPU_TEMP_BUF_NLONGS);
    uintptr_t next_tb = 0;

    tci_reg[TCG_AREG0] = (tcg_target_ulong)env;
    tci_reg[TCG_REG_CALL_STACK] = sp_value;
    assert(tb_ptr);

    for (;;) {
        TCGOpcode opc = tb_ptr[0];
#if !defined(NDEBUG)
        uint8_t op_size = tb_ptr[1];
        uint8_t *old_code_ptr = tb_ptr;
#endif
        tcg_target_ulong t0;
        tcg_target_ulong t1;
        tcg_target_ulong t2;
        tcg_target_ulong label;
        TCGCond condition;
        target_ulong taddr;
        uint8_t tmp8;
        uint16_t tmp16;
        uint32_t tmp32;
        uint64_t tmp64;
#if TCG_TARGET_REG_BITS == 32
        uint64_t v64;
#endif
        TCGMemOp memop;

#if defined(GETPC)
        tci_tb_ptr = (uintptr_t)tb_ptr;
#endif

        /* Skip opcode and size entry. */
        tb_ptr += 2;
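
        /* Every encoded instruction begins with a two-byte header:
         * the opcode and the instruction's total encoded length.  In
         * debug builds, the assert at the bottom of this loop verifies
         * that the operand readers consumed exactly op_size bytes,
         * catching encoder/decoder mismatches early. */
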
        switch (opc) {
        case INDEX_op_end:
        case INDEX_op_nop:
            break;
        case INDEX_op_nop1:
        case INDEX_op_nop2:
        case INDEX_op_nop3:
        case INDEX_op_nopn:
        case INDEX_op_discard:
            TODO();
            break;
        case INDEX_op_set_label:
            TODO();
            break;
        case INDEX_op_call:
            t0 = tci_read_ri(&tb_ptr);
#if TCG_TARGET_REG_BITS == 32
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5),
                                          tci_read_reg(TCG_REG_R6),
                                          tci_read_reg(TCG_REG_R7),
                                          tci_read_reg(TCG_REG_R8),
                                          tci_read_reg(TCG_REG_R9),
                                          tci_read_reg(TCG_REG_R10));
            tci_write_reg(TCG_REG_R0, tmp64);
            tci_write_reg(TCG_REG_R1, tmp64 >> 32);
#else
            tmp64 = ((helper_function)t0)(tci_read_reg(TCG_REG_R0),
                                          tci_read_reg(TCG_REG_R1),
                                          tci_read_reg(TCG_REG_R2),
                                          tci_read_reg(TCG_REG_R3),
                                          tci_read_reg(TCG_REG_R5));
            tci_write_reg(TCG_REG_R0, tmp64);
#endif
            break;
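
            /* Helper calls use a fixed pseudo-register convention:
             * arguments come from R0 upward, skipping R4 (presumably
             * because it serves as TCG_REG_CALL_STACK), and the 64-bit
             * result is returned in R0, or in the R0/R1 pair on a
             * 32-bit host. */
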
        case INDEX_op_br:
            label = tci_read_label(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr = (uint8_t *)label;
            continue;
        case INDEX_op_setcond_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare32(t1, t2, condition));
            break;
#if TCG_TARGET_REG_BITS == 32
        case INDEX_op_setcond2_i32:
            t0 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg32(t0, tci_compare64(tmp64, v64, condition));
            break;
#elif TCG_TARGET_REG_BITS == 64
        case INDEX_op_setcond_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            tci_write_reg64(t0, tci_compare64(t1, t2, condition));
            break;
#endif
        case INDEX_op_mov_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
        case INDEX_op_movi_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_i32(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;

            /* Load/store operations (32 bit). */

        case INDEX_op_ld8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i32:
        case INDEX_op_ld16u_i32:
            TODO();
            break;
        case INDEX_op_ld16s_i32:
            TODO();
            break;
        case INDEX_op_ld_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i32:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i32:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint32_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (32 bit). */

        case INDEX_op_add_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 + t2);
            break;
        case INDEX_op_sub_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 - t2);
            break;
        case INDEX_op_mul_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i32
        case INDEX_op_div_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 / (int32_t)t2);
            break;
        case INDEX_op_divu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 / t2);
            break;
        case INDEX_op_rem_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, (int32_t)t1 % (int32_t)t2);
            break;
        case INDEX_op_remu_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 % t2);
            break;
#elif TCG_TARGET_HAS_div2_i32
        case INDEX_op_div2_i32:
        case INDEX_op_divu2_i32:
            TODO();
            break;
#endif
        case INDEX_op_and_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 & t2);
            break;
        case INDEX_op_or_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 | t2);
            break;
        case INDEX_op_xor_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (32 bit). */

        case INDEX_op_shl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 << (t2 & 31));
            break;
        case INDEX_op_shr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, t1 >> (t2 & 31));
            break;
        case INDEX_op_sar_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ((int32_t)t1 >> (t2 & 31)));
            break;
#if TCG_TARGET_HAS_rot_i32
        case INDEX_op_rotl_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, rol32(t1, t2 & 31));
            break;
        case INDEX_op_rotr_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_ri32(&tb_ptr);
            t2 = tci_read_ri32(&tb_ptr);
            tci_write_reg32(t0, ror32(t1, t2 & 31));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i32
        case INDEX_op_deposit_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            t2 = tci_read_r32(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp32 = (((1 << tmp8) - 1) << tmp16);
            tci_write_reg32(t0, (t1 & ~tmp32) | ((t2 << tmp16) & tmp32));
            break;
#endif
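        /* The deposit above replaces the tmp8-bit wide field at bit
         * position tmp16 of t1 with the low bits of t2.  For example,
         * tmp16 = 8 and tmp8 = 4 give tmp32 = 0x00000f00, so bits
         * [11:8] of t1 are replaced by the low four bits of t2. */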
        case INDEX_op_brcond_i32:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_ri32(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare32(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_REG_BITS == 32
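        /* On 32-bit hosts, 64-bit arithmetic works on register pairs:
         * add2/sub2 join the pairs with tci_read_r64(), compute in
         * uint64_t, and split the result via tci_write_reg64();
         * mulu2 forms the full 32x32 -> 64 bit product. */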
        case INDEX_op_add2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 += tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_sub2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            tmp64 = tci_read_r64(&tb_ptr);
            tmp64 -= tci_read_r64(&tb_ptr);
            tci_write_reg64(t1, t0, tmp64);
            break;
        case INDEX_op_brcond2_i32:
            tmp64 = tci_read_r64(&tb_ptr);
            v64 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(tmp64, v64, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
        case INDEX_op_mulu2_i32:
            t0 = *tb_ptr++;
            t1 = *tb_ptr++;
            t2 = tci_read_r32(&tb_ptr);
            tmp64 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t1, t0, t2 * tmp64);
            break;
#endif /* TCG_TARGET_REG_BITS == 32 */
#if TCG_TARGET_HAS_ext8s_i32
        case INDEX_op_ext8s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i32
        case INDEX_op_ext16s_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8u_i32
        case INDEX_op_ext8u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i32
        case INDEX_op_ext16u_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i32
        case INDEX_op_bswap16_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg32(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i32
        case INDEX_op_bswap32_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i32
        case INDEX_op_not_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i32
        case INDEX_op_neg_i32:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg32(t0, -t1);
            break;
#endif
#if TCG_TARGET_REG_BITS == 64
        case INDEX_op_mov_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
        case INDEX_op_movi_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_i64(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;

            /* Load/store operations (64 bit). */

        case INDEX_op_ld8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg8(t0, *(uint8_t *)(t1 + t2));
            break;
        case INDEX_op_ld8s_i64:
        case INDEX_op_ld16u_i64:
        case INDEX_op_ld16s_i64:
            TODO();
            break;
        case INDEX_op_ld32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32(t0, *(uint32_t *)(t1 + t2));
            break;
        case INDEX_op_ld32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg32s(t0, *(int32_t *)(t1 + t2));
            break;
        case INDEX_op_ld_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            tci_write_reg64(t0, *(uint64_t *)(t1 + t2));
            break;
        case INDEX_op_st8_i64:
            t0 = tci_read_r8(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint8_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st16_i64:
            t0 = tci_read_r16(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint16_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st32_i64:
            t0 = tci_read_r32(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            *(uint32_t *)(t1 + t2) = t0;
            break;
        case INDEX_op_st_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_r(&tb_ptr);
            t2 = tci_read_s32(&tb_ptr);
            assert(t1 != sp_value || (int32_t)t2 < 0);
            *(uint64_t *)(t1 + t2) = t0;
            break;

            /* Arithmetic operations (64 bit). */

        case INDEX_op_add_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 + t2);
            break;
        case INDEX_op_sub_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 - t2);
            break;
        case INDEX_op_mul_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 * t2);
            break;
#if TCG_TARGET_HAS_div_i64
        case INDEX_op_div_i64:
        case INDEX_op_divu_i64:
        case INDEX_op_rem_i64:
        case INDEX_op_remu_i64:
            TODO();
            break;
#elif TCG_TARGET_HAS_div2_i64
        case INDEX_op_div2_i64:
        case INDEX_op_divu2_i64:
            TODO();
            break;
#endif
        case INDEX_op_and_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 & t2);
            break;
        case INDEX_op_or_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 | t2);
            break;
        case INDEX_op_xor_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 ^ t2);
            break;

            /* Shift/rotate operations (64 bit). */

        case INDEX_op_shl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 << (t2 & 63));
            break;
        case INDEX_op_shr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, t1 >> (t2 & 63));
            break;
        case INDEX_op_sar_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ((int64_t)t1 >> (t2 & 63)));
            break;
#if TCG_TARGET_HAS_rot_i64
        case INDEX_op_rotl_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, rol64(t1, t2 & 63));
            break;
        case INDEX_op_rotr_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_ri64(&tb_ptr);
            t2 = tci_read_ri64(&tb_ptr);
            tci_write_reg64(t0, ror64(t1, t2 & 63));
            break;
#endif
#if TCG_TARGET_HAS_deposit_i64
        case INDEX_op_deposit_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            t2 = tci_read_r64(&tb_ptr);
            tmp16 = *tb_ptr++;
            tmp8 = *tb_ptr++;
            tmp64 = (((1ULL << tmp8) - 1) << tmp16);
            tci_write_reg64(t0, (t1 & ~tmp64) | ((t2 << tmp16) & tmp64));
            break;
#endif
        case INDEX_op_brcond_i64:
            t0 = tci_read_r64(&tb_ptr);
            t1 = tci_read_ri64(&tb_ptr);
            condition = *tb_ptr++;
            label = tci_read_label(&tb_ptr);
            if (tci_compare64(t0, t1, condition)) {
                assert(tb_ptr == old_code_ptr + op_size);
                tb_ptr = (uint8_t *)label;
                continue;
            }
            break;
#if TCG_TARGET_HAS_ext8u_i64
        case INDEX_op_ext8u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext8s_i64
        case INDEX_op_ext8s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r8s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16s_i64
        case INDEX_op_ext16s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext16u_i64
        case INDEX_op_ext16u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32s_i64
        case INDEX_op_ext32s_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32s(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_ext32u_i64
        case INDEX_op_ext32u_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, t1);
            break;
#endif
#if TCG_TARGET_HAS_bswap16_i64
        case INDEX_op_bswap16_i64:
            TODO();
            t0 = *tb_ptr++;
            t1 = tci_read_r16(&tb_ptr);
            tci_write_reg64(t0, bswap16(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap32_i64
        case INDEX_op_bswap32_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r32(&tb_ptr);
            tci_write_reg64(t0, bswap32(t1));
            break;
#endif
#if TCG_TARGET_HAS_bswap64_i64
        case INDEX_op_bswap64_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, bswap64(t1));
            break;
#endif
#if TCG_TARGET_HAS_not_i64
        case INDEX_op_not_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, ~t1);
            break;
#endif
#if TCG_TARGET_HAS_neg_i64
        case INDEX_op_neg_i64:
            t0 = *tb_ptr++;
            t1 = tci_read_r64(&tb_ptr);
            tci_write_reg64(t0, -t1);
            break;
#endif
#endif /* TCG_TARGET_REG_BITS == 64 */

            /* QEMU specific operations. */

#if TARGET_LONG_BITS > TCG_TARGET_REG_BITS
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#else
        case INDEX_op_debug_insn_start:
            TODO();
            break;
#endif
        case INDEX_op_exit_tb:
            next_tb = *(uint64_t *)tb_ptr;
            goto exit;
            break;
        case INDEX_op_goto_tb:
            t0 = tci_read_i32(&tb_ptr);
            assert(tb_ptr == old_code_ptr + op_size);
            tb_ptr += (int32_t)t0;
            continue;
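
        /* exit_tb hands a pointer-sized value back to the caller via
         * next_tb, while goto_tb applies a 32-bit displacement stored
         * in the bytecode; patching that displacement is what lets
         * translated blocks be chained without returning to the main
         * loop. */
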
        case INDEX_op_qemu_ld_i32:
            t0 = *tb_ptr++;
            taddr = tci_read_ulong(&tb_ptr);
            memop = tci_read_i(&tb_ptr);
            switch (memop) {
            case MO_UB:
                tmp32 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp32 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp32 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp32 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp32 = qemu_ld_leul;
                break;
            case MO_BEUW:
                tmp32 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp32 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp32 = qemu_ld_beul;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(t0, tmp32);
            break;
        case INDEX_op_qemu_ld_i64:
            t0 = *tb_ptr++;
            if (TCG_TARGET_REG_BITS == 32) {
                t1 = *tb_ptr++;
            }
            taddr = tci_read_ulong(&tb_ptr);
            memop = tci_read_i(&tb_ptr);
            switch (memop) {
            case MO_UB:
                tmp64 = qemu_ld_ub;
                break;
            case MO_SB:
                tmp64 = (int8_t)qemu_ld_ub;
                break;
            case MO_LEUW:
                tmp64 = qemu_ld_leuw;
                break;
            case MO_LESW:
                tmp64 = (int16_t)qemu_ld_leuw;
                break;
            case MO_LEUL:
                tmp64 = qemu_ld_leul;
                break;
            case MO_LESL:
                tmp64 = (int32_t)qemu_ld_leul;
                break;
            case MO_LEQ:
                tmp64 = qemu_ld_leq;
                break;
            case MO_BEUW:
                tmp64 = qemu_ld_beuw;
                break;
            case MO_BESW:
                tmp64 = (int16_t)qemu_ld_beuw;
                break;
            case MO_BEUL:
                tmp64 = qemu_ld_beul;
                break;
            case MO_BESL:
                tmp64 = (int32_t)qemu_ld_beul;
                break;
            case MO_BEQ:
                tmp64 = qemu_ld_beq;
                break;
            default:
                tcg_abort();
            }
            tci_write_reg(t0, tmp64);
            if (TCG_TARGET_REG_BITS == 32) {
                tci_write_reg(t1, tmp64 >> 32);
            }
            break;
        case INDEX_op_qemu_st_i32:
            t0 = tci_read_r(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
            memop = tci_read_i(&tb_ptr);
            switch (memop) {
            case MO_UB:
                qemu_st_b(t0);
                break;
            case MO_LEUW:
                qemu_st_lew(t0);
                break;
            case MO_LEUL:
                qemu_st_lel(t0);
                break;
            case MO_BEUW:
                qemu_st_bew(t0);
                break;
            case MO_BEUL:
                qemu_st_bel(t0);
                break;
            default:
                tcg_abort();
            }
            break;
        case INDEX_op_qemu_st_i64:
            tmp64 = tci_read_r64(&tb_ptr);
            taddr = tci_read_ulong(&tb_ptr);
            memop = tci_read_i(&tb_ptr);
            switch (memop) {
            case MO_UB:
                qemu_st_b(tmp64);
                break;
            case MO_LEUW:
                qemu_st_lew(tmp64);
                break;
            case MO_LEUL:
                qemu_st_lel(tmp64);
                break;
            case MO_LEQ:
                qemu_st_leq(tmp64);
                break;
            case MO_BEUW:
                qemu_st_bew(tmp64);
                break;
            case MO_BEUL:
                qemu_st_bel(tmp64);
                break;
            case MO_BEQ:
                qemu_st_beq(tmp64);
                break;
            default:
                tcg_abort();
            }
            break;
        default:
            TODO();
            break;
        }
        assert(tb_ptr == old_code_ptr + op_size);
    }
exit:
    return next_tb;
}