2
0

tci.c 38 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211121212131214121512161217121812191220122112221223122412251226122712281229123012311232123312341235123612371238123912401241124212431244124512461247
  1. /*
  2. * Tiny Code Interpreter for QEMU
  3. *
  4. * Copyright (c) 2009, 2011, 2016 Stefan Weil
  5. *
  6. * This program is free software: you can redistribute it and/or modify
  7. * it under the terms of the GNU General Public License as published by
  8. * the Free Software Foundation, either version 2 of the License, or
  9. * (at your option) any later version.
  10. *
  11. * This program is distributed in the hope that it will be useful,
  12. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  13. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  14. * GNU General Public License for more details.
  15. *
  16. * You should have received a copy of the GNU General Public License
  17. * along with this program. If not, see <http://www.gnu.org/licenses/>.
  18. */
  19. #include "qemu/osdep.h"
  20. #include "tcg/tcg.h"
  21. #include "tcg/helper-info.h"
  22. #include "tcg/tcg-ldst.h"
  23. #include "disas/dis-asm.h"
  24. #include "tcg-has.h"
  25. #include <ffi.h>
  26. /*
  27. * Enable TCI assertions only when debugging TCG (and without NDEBUG defined).
  28. * Without assertions, the interpreter runs much faster.
  29. */
#if defined(CONFIG_DEBUG_TCG)
/* Debug build: a real assertion that aborts on failure. */
# define tci_assert(cond) assert(cond)
#else
/* Fast build: evaluate the expression for side effects, check nothing. */
# define tci_assert(cond) ((void)(cond))
#endif
  35. __thread uintptr_t tci_tb_ptr;
  36. static void tci_write_reg64(tcg_target_ulong *regs, uint32_t high_index,
  37. uint32_t low_index, uint64_t value)
  38. {
  39. regs[low_index] = (uint32_t)value;
  40. regs[high_index] = value >> 32;
  41. }
  42. /* Create a 64 bit value from two 32 bit values. */
  43. static uint64_t tci_uint64(uint32_t high, uint32_t low)
  44. {
  45. return ((uint64_t)high << 32) + low;
  46. }
  47. /*
  48. * Load sets of arguments all at once. The naming convention is:
  49. * tci_args_<arguments>
  50. * where arguments is a sequence of
  51. *
  52. * b = immediate (bit position)
  53. * c = condition (TCGCond)
  54. * i = immediate (uint32_t)
  55. * I = immediate (tcg_target_ulong)
  56. * l = label or pointer
  57. * m = immediate (MemOpIdx)
  58. * n = immediate (call return length)
  59. * r = register
  60. * s = signed ldst offset
  61. */
  62. static void tci_args_l(uint32_t insn, const void *tb_ptr, void **l0)
  63. {
  64. int diff = sextract32(insn, 12, 20);
  65. *l0 = diff ? (void *)tb_ptr + diff : NULL;
  66. }
  67. static void tci_args_r(uint32_t insn, TCGReg *r0)
  68. {
  69. *r0 = extract32(insn, 8, 4);
  70. }
  71. static void tci_args_nl(uint32_t insn, const void *tb_ptr,
  72. uint8_t *n0, void **l1)
  73. {
  74. *n0 = extract32(insn, 8, 4);
  75. *l1 = sextract32(insn, 12, 20) + (void *)tb_ptr;
  76. }
  77. static void tci_args_rl(uint32_t insn, const void *tb_ptr,
  78. TCGReg *r0, void **l1)
  79. {
  80. *r0 = extract32(insn, 8, 4);
  81. *l1 = sextract32(insn, 12, 20) + (void *)tb_ptr;
  82. }
  83. static void tci_args_rr(uint32_t insn, TCGReg *r0, TCGReg *r1)
  84. {
  85. *r0 = extract32(insn, 8, 4);
  86. *r1 = extract32(insn, 12, 4);
  87. }
  88. static void tci_args_ri(uint32_t insn, TCGReg *r0, tcg_target_ulong *i1)
  89. {
  90. *r0 = extract32(insn, 8, 4);
  91. *i1 = sextract32(insn, 12, 20);
  92. }
  93. static void tci_args_rrm(uint32_t insn, TCGReg *r0,
  94. TCGReg *r1, MemOpIdx *m2)
  95. {
  96. *r0 = extract32(insn, 8, 4);
  97. *r1 = extract32(insn, 12, 4);
  98. *m2 = extract32(insn, 16, 16);
  99. }
  100. static void tci_args_rrr(uint32_t insn, TCGReg *r0, TCGReg *r1, TCGReg *r2)
  101. {
  102. *r0 = extract32(insn, 8, 4);
  103. *r1 = extract32(insn, 12, 4);
  104. *r2 = extract32(insn, 16, 4);
  105. }
  106. static void tci_args_rrs(uint32_t insn, TCGReg *r0, TCGReg *r1, int32_t *i2)
  107. {
  108. *r0 = extract32(insn, 8, 4);
  109. *r1 = extract32(insn, 12, 4);
  110. *i2 = sextract32(insn, 16, 16);
  111. }
  112. static void tci_args_rrbb(uint32_t insn, TCGReg *r0, TCGReg *r1,
  113. uint8_t *i2, uint8_t *i3)
  114. {
  115. *r0 = extract32(insn, 8, 4);
  116. *r1 = extract32(insn, 12, 4);
  117. *i2 = extract32(insn, 16, 6);
  118. *i3 = extract32(insn, 22, 6);
  119. }
  120. static void tci_args_rrrc(uint32_t insn,
  121. TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGCond *c3)
  122. {
  123. *r0 = extract32(insn, 8, 4);
  124. *r1 = extract32(insn, 12, 4);
  125. *r2 = extract32(insn, 16, 4);
  126. *c3 = extract32(insn, 20, 4);
  127. }
  128. static void tci_args_rrrbb(uint32_t insn, TCGReg *r0, TCGReg *r1,
  129. TCGReg *r2, uint8_t *i3, uint8_t *i4)
  130. {
  131. *r0 = extract32(insn, 8, 4);
  132. *r1 = extract32(insn, 12, 4);
  133. *r2 = extract32(insn, 16, 4);
  134. *i3 = extract32(insn, 20, 6);
  135. *i4 = extract32(insn, 26, 6);
  136. }
  137. static void tci_args_rrrr(uint32_t insn,
  138. TCGReg *r0, TCGReg *r1, TCGReg *r2, TCGReg *r3)
  139. {
  140. *r0 = extract32(insn, 8, 4);
  141. *r1 = extract32(insn, 12, 4);
  142. *r2 = extract32(insn, 16, 4);
  143. *r3 = extract32(insn, 20, 4);
  144. }
  145. static void tci_args_rrrrrc(uint32_t insn, TCGReg *r0, TCGReg *r1,
  146. TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGCond *c5)
  147. {
  148. *r0 = extract32(insn, 8, 4);
  149. *r1 = extract32(insn, 12, 4);
  150. *r2 = extract32(insn, 16, 4);
  151. *r3 = extract32(insn, 20, 4);
  152. *r4 = extract32(insn, 24, 4);
  153. *c5 = extract32(insn, 28, 4);
  154. }
  155. static void tci_args_rrrrrr(uint32_t insn, TCGReg *r0, TCGReg *r1,
  156. TCGReg *r2, TCGReg *r3, TCGReg *r4, TCGReg *r5)
  157. {
  158. *r0 = extract32(insn, 8, 4);
  159. *r1 = extract32(insn, 12, 4);
  160. *r2 = extract32(insn, 16, 4);
  161. *r3 = extract32(insn, 20, 4);
  162. *r4 = extract32(insn, 24, 4);
  163. *r5 = extract32(insn, 28, 4);
  164. }
  165. static bool tci_compare32(uint32_t u0, uint32_t u1, TCGCond condition)
  166. {
  167. bool result = false;
  168. int32_t i0 = u0;
  169. int32_t i1 = u1;
  170. switch (condition) {
  171. case TCG_COND_EQ:
  172. result = (u0 == u1);
  173. break;
  174. case TCG_COND_NE:
  175. result = (u0 != u1);
  176. break;
  177. case TCG_COND_LT:
  178. result = (i0 < i1);
  179. break;
  180. case TCG_COND_GE:
  181. result = (i0 >= i1);
  182. break;
  183. case TCG_COND_LE:
  184. result = (i0 <= i1);
  185. break;
  186. case TCG_COND_GT:
  187. result = (i0 > i1);
  188. break;
  189. case TCG_COND_LTU:
  190. result = (u0 < u1);
  191. break;
  192. case TCG_COND_GEU:
  193. result = (u0 >= u1);
  194. break;
  195. case TCG_COND_LEU:
  196. result = (u0 <= u1);
  197. break;
  198. case TCG_COND_GTU:
  199. result = (u0 > u1);
  200. break;
  201. case TCG_COND_TSTEQ:
  202. result = (u0 & u1) == 0;
  203. break;
  204. case TCG_COND_TSTNE:
  205. result = (u0 & u1) != 0;
  206. break;
  207. default:
  208. g_assert_not_reached();
  209. }
  210. return result;
  211. }
  212. static bool tci_compare64(uint64_t u0, uint64_t u1, TCGCond condition)
  213. {
  214. bool result = false;
  215. int64_t i0 = u0;
  216. int64_t i1 = u1;
  217. switch (condition) {
  218. case TCG_COND_EQ:
  219. result = (u0 == u1);
  220. break;
  221. case TCG_COND_NE:
  222. result = (u0 != u1);
  223. break;
  224. case TCG_COND_LT:
  225. result = (i0 < i1);
  226. break;
  227. case TCG_COND_GE:
  228. result = (i0 >= i1);
  229. break;
  230. case TCG_COND_LE:
  231. result = (i0 <= i1);
  232. break;
  233. case TCG_COND_GT:
  234. result = (i0 > i1);
  235. break;
  236. case TCG_COND_LTU:
  237. result = (u0 < u1);
  238. break;
  239. case TCG_COND_GEU:
  240. result = (u0 >= u1);
  241. break;
  242. case TCG_COND_LEU:
  243. result = (u0 <= u1);
  244. break;
  245. case TCG_COND_GTU:
  246. result = (u0 > u1);
  247. break;
  248. case TCG_COND_TSTEQ:
  249. result = (u0 & u1) == 0;
  250. break;
  251. case TCG_COND_TSTNE:
  252. result = (u0 & u1) != 0;
  253. break;
  254. default:
  255. g_assert_not_reached();
  256. }
  257. return result;
  258. }
  259. static uint64_t tci_qemu_ld(CPUArchState *env, uint64_t taddr,
  260. MemOpIdx oi, const void *tb_ptr)
  261. {
  262. MemOp mop = get_memop(oi);
  263. uintptr_t ra = (uintptr_t)tb_ptr;
  264. switch (mop & MO_SSIZE) {
  265. case MO_UB:
  266. return helper_ldub_mmu(env, taddr, oi, ra);
  267. case MO_SB:
  268. return helper_ldsb_mmu(env, taddr, oi, ra);
  269. case MO_UW:
  270. return helper_lduw_mmu(env, taddr, oi, ra);
  271. case MO_SW:
  272. return helper_ldsw_mmu(env, taddr, oi, ra);
  273. case MO_UL:
  274. return helper_ldul_mmu(env, taddr, oi, ra);
  275. case MO_SL:
  276. return helper_ldsl_mmu(env, taddr, oi, ra);
  277. case MO_UQ:
  278. return helper_ldq_mmu(env, taddr, oi, ra);
  279. default:
  280. g_assert_not_reached();
  281. }
  282. }
  283. static void tci_qemu_st(CPUArchState *env, uint64_t taddr, uint64_t val,
  284. MemOpIdx oi, const void *tb_ptr)
  285. {
  286. MemOp mop = get_memop(oi);
  287. uintptr_t ra = (uintptr_t)tb_ptr;
  288. switch (mop & MO_SIZE) {
  289. case MO_UB:
  290. helper_stb_mmu(env, taddr, val, oi, ra);
  291. break;
  292. case MO_UW:
  293. helper_stw_mmu(env, taddr, val, oi, ra);
  294. break;
  295. case MO_UL:
  296. helper_stl_mmu(env, taddr, val, oi, ra);
  297. break;
  298. case MO_UQ:
  299. helper_stq_mmu(env, taddr, val, oi, ra);
  300. break;
  301. default:
  302. g_assert_not_reached();
  303. }
  304. }
/*
 * Expand to case labels for an op that exists in both 32-bit and 64-bit
 * flavors; on 32-bit hosts the _i64 variants are absent and CASE_64
 * expands to nothing.
 */
#if TCG_TARGET_REG_BITS == 64
# define CASE_32_64(x) \
case glue(glue(INDEX_op_, x), _i64): \
case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x) \
case glue(glue(INDEX_op_, x), _i64):
#else
# define CASE_32_64(x) \
case glue(glue(INDEX_op_, x), _i32):
# define CASE_64(x)
#endif
  316. /* Interpret pseudo code in tb. */
  317. /*
  318. * Disable CFI checks.
  319. * One possible operation in the pseudo code is a call to binary code.
  320. * Therefore, disable CFI checks in the interpreter function
  321. */
  322. uintptr_t QEMU_DISABLE_CFI tcg_qemu_tb_exec(CPUArchState *env,
  323. const void *v_tb_ptr)
  324. {
  325. const uint32_t *tb_ptr = v_tb_ptr;
  326. tcg_target_ulong regs[TCG_TARGET_NB_REGS];
  327. uint64_t stack[(TCG_STATIC_CALL_ARGS_SIZE + TCG_STATIC_FRAME_SIZE)
  328. / sizeof(uint64_t)];
  329. regs[TCG_AREG0] = (tcg_target_ulong)env;
  330. regs[TCG_REG_CALL_STACK] = (uintptr_t)stack;
  331. tci_assert(tb_ptr);
  332. for (;;) {
  333. uint32_t insn;
  334. TCGOpcode opc;
  335. TCGReg r0, r1, r2, r3, r4, r5;
  336. tcg_target_ulong t1;
  337. TCGCond condition;
  338. uint8_t pos, len;
  339. uint32_t tmp32;
  340. uint64_t tmp64, taddr;
  341. uint64_t T1, T2;
  342. MemOpIdx oi;
  343. int32_t ofs;
  344. void *ptr;
  345. insn = *tb_ptr++;
  346. opc = extract32(insn, 0, 8);
  347. switch (opc) {
  348. case INDEX_op_call:
  349. {
  350. void *call_slots[MAX_CALL_IARGS];
  351. ffi_cif *cif;
  352. void *func;
  353. unsigned i, s, n;
  354. tci_args_nl(insn, tb_ptr, &len, &ptr);
  355. func = ((void **)ptr)[0];
  356. cif = ((void **)ptr)[1];
  357. n = cif->nargs;
  358. for (i = s = 0; i < n; ++i) {
  359. ffi_type *t = cif->arg_types[i];
  360. call_slots[i] = &stack[s];
  361. s += DIV_ROUND_UP(t->size, 8);
  362. }
  363. /* Helper functions may need to access the "return address" */
  364. tci_tb_ptr = (uintptr_t)tb_ptr;
  365. ffi_call(cif, func, stack, call_slots);
  366. }
  367. switch (len) {
  368. case 0: /* void */
  369. break;
  370. case 1: /* uint32_t */
  371. /*
  372. * The result winds up "left-aligned" in the stack[0] slot.
  373. * Note that libffi has an odd special case in that it will
  374. * always widen an integral result to ffi_arg.
  375. */
  376. if (sizeof(ffi_arg) == 8) {
  377. regs[TCG_REG_R0] = (uint32_t)stack[0];
  378. } else {
  379. regs[TCG_REG_R0] = *(uint32_t *)stack;
  380. }
  381. break;
  382. case 2: /* uint64_t */
  383. /*
  384. * For TCG_TARGET_REG_BITS == 32, the register pair
  385. * must stay in host memory order.
  386. */
  387. memcpy(&regs[TCG_REG_R0], stack, 8);
  388. break;
  389. case 3: /* Int128 */
  390. memcpy(&regs[TCG_REG_R0], stack, 16);
  391. break;
  392. default:
  393. g_assert_not_reached();
  394. }
  395. break;
  396. case INDEX_op_br:
  397. tci_args_l(insn, tb_ptr, &ptr);
  398. tb_ptr = ptr;
  399. continue;
  400. case INDEX_op_setcond_i32:
  401. tci_args_rrrc(insn, &r0, &r1, &r2, &condition);
  402. regs[r0] = tci_compare32(regs[r1], regs[r2], condition);
  403. break;
  404. case INDEX_op_movcond_i32:
  405. tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
  406. tmp32 = tci_compare32(regs[r1], regs[r2], condition);
  407. regs[r0] = regs[tmp32 ? r3 : r4];
  408. break;
  409. #if TCG_TARGET_REG_BITS == 32
  410. case INDEX_op_setcond2_i32:
  411. tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
  412. T1 = tci_uint64(regs[r2], regs[r1]);
  413. T2 = tci_uint64(regs[r4], regs[r3]);
  414. regs[r0] = tci_compare64(T1, T2, condition);
  415. break;
  416. #elif TCG_TARGET_REG_BITS == 64
  417. case INDEX_op_setcond_i64:
  418. tci_args_rrrc(insn, &r0, &r1, &r2, &condition);
  419. regs[r0] = tci_compare64(regs[r1], regs[r2], condition);
  420. break;
  421. case INDEX_op_movcond_i64:
  422. tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &condition);
  423. tmp32 = tci_compare64(regs[r1], regs[r2], condition);
  424. regs[r0] = regs[tmp32 ? r3 : r4];
  425. break;
  426. #endif
  427. CASE_32_64(mov)
  428. tci_args_rr(insn, &r0, &r1);
  429. regs[r0] = regs[r1];
  430. break;
  431. case INDEX_op_tci_movi:
  432. tci_args_ri(insn, &r0, &t1);
  433. regs[r0] = t1;
  434. break;
  435. case INDEX_op_tci_movl:
  436. tci_args_rl(insn, tb_ptr, &r0, &ptr);
  437. regs[r0] = *(tcg_target_ulong *)ptr;
  438. break;
  439. /* Load/store operations (32 bit). */
  440. CASE_32_64(ld8u)
  441. tci_args_rrs(insn, &r0, &r1, &ofs);
  442. ptr = (void *)(regs[r1] + ofs);
  443. regs[r0] = *(uint8_t *)ptr;
  444. break;
  445. CASE_32_64(ld8s)
  446. tci_args_rrs(insn, &r0, &r1, &ofs);
  447. ptr = (void *)(regs[r1] + ofs);
  448. regs[r0] = *(int8_t *)ptr;
  449. break;
  450. CASE_32_64(ld16u)
  451. tci_args_rrs(insn, &r0, &r1, &ofs);
  452. ptr = (void *)(regs[r1] + ofs);
  453. regs[r0] = *(uint16_t *)ptr;
  454. break;
  455. CASE_32_64(ld16s)
  456. tci_args_rrs(insn, &r0, &r1, &ofs);
  457. ptr = (void *)(regs[r1] + ofs);
  458. regs[r0] = *(int16_t *)ptr;
  459. break;
  460. case INDEX_op_ld_i32:
  461. CASE_64(ld32u)
  462. tci_args_rrs(insn, &r0, &r1, &ofs);
  463. ptr = (void *)(regs[r1] + ofs);
  464. regs[r0] = *(uint32_t *)ptr;
  465. break;
  466. CASE_32_64(st8)
  467. tci_args_rrs(insn, &r0, &r1, &ofs);
  468. ptr = (void *)(regs[r1] + ofs);
  469. *(uint8_t *)ptr = regs[r0];
  470. break;
  471. CASE_32_64(st16)
  472. tci_args_rrs(insn, &r0, &r1, &ofs);
  473. ptr = (void *)(regs[r1] + ofs);
  474. *(uint16_t *)ptr = regs[r0];
  475. break;
  476. case INDEX_op_st_i32:
  477. CASE_64(st32)
  478. tci_args_rrs(insn, &r0, &r1, &ofs);
  479. ptr = (void *)(regs[r1] + ofs);
  480. *(uint32_t *)ptr = regs[r0];
  481. break;
  482. /* Arithmetic operations (mixed 32/64 bit). */
  483. CASE_32_64(add)
  484. tci_args_rrr(insn, &r0, &r1, &r2);
  485. regs[r0] = regs[r1] + regs[r2];
  486. break;
  487. CASE_32_64(sub)
  488. tci_args_rrr(insn, &r0, &r1, &r2);
  489. regs[r0] = regs[r1] - regs[r2];
  490. break;
  491. CASE_32_64(mul)
  492. tci_args_rrr(insn, &r0, &r1, &r2);
  493. regs[r0] = regs[r1] * regs[r2];
  494. break;
  495. CASE_32_64(and)
  496. tci_args_rrr(insn, &r0, &r1, &r2);
  497. regs[r0] = regs[r1] & regs[r2];
  498. break;
  499. CASE_32_64(or)
  500. tci_args_rrr(insn, &r0, &r1, &r2);
  501. regs[r0] = regs[r1] | regs[r2];
  502. break;
  503. CASE_32_64(xor)
  504. tci_args_rrr(insn, &r0, &r1, &r2);
  505. regs[r0] = regs[r1] ^ regs[r2];
  506. break;
  507. #if TCG_TARGET_HAS_andc_i32 || TCG_TARGET_HAS_andc_i64
  508. CASE_32_64(andc)
  509. tci_args_rrr(insn, &r0, &r1, &r2);
  510. regs[r0] = regs[r1] & ~regs[r2];
  511. break;
  512. #endif
  513. #if TCG_TARGET_HAS_orc_i32 || TCG_TARGET_HAS_orc_i64
  514. CASE_32_64(orc)
  515. tci_args_rrr(insn, &r0, &r1, &r2);
  516. regs[r0] = regs[r1] | ~regs[r2];
  517. break;
  518. #endif
  519. #if TCG_TARGET_HAS_eqv_i32 || TCG_TARGET_HAS_eqv_i64
  520. CASE_32_64(eqv)
  521. tci_args_rrr(insn, &r0, &r1, &r2);
  522. regs[r0] = ~(regs[r1] ^ regs[r2]);
  523. break;
  524. #endif
  525. #if TCG_TARGET_HAS_nand_i32 || TCG_TARGET_HAS_nand_i64
  526. CASE_32_64(nand)
  527. tci_args_rrr(insn, &r0, &r1, &r2);
  528. regs[r0] = ~(regs[r1] & regs[r2]);
  529. break;
  530. #endif
  531. #if TCG_TARGET_HAS_nor_i32 || TCG_TARGET_HAS_nor_i64
  532. CASE_32_64(nor)
  533. tci_args_rrr(insn, &r0, &r1, &r2);
  534. regs[r0] = ~(regs[r1] | regs[r2]);
  535. break;
  536. #endif
  537. /* Arithmetic operations (32 bit). */
  538. case INDEX_op_div_i32:
  539. tci_args_rrr(insn, &r0, &r1, &r2);
  540. regs[r0] = (int32_t)regs[r1] / (int32_t)regs[r2];
  541. break;
  542. case INDEX_op_divu_i32:
  543. tci_args_rrr(insn, &r0, &r1, &r2);
  544. regs[r0] = (uint32_t)regs[r1] / (uint32_t)regs[r2];
  545. break;
  546. case INDEX_op_rem_i32:
  547. tci_args_rrr(insn, &r0, &r1, &r2);
  548. regs[r0] = (int32_t)regs[r1] % (int32_t)regs[r2];
  549. break;
  550. case INDEX_op_remu_i32:
  551. tci_args_rrr(insn, &r0, &r1, &r2);
  552. regs[r0] = (uint32_t)regs[r1] % (uint32_t)regs[r2];
  553. break;
  554. #if TCG_TARGET_HAS_clz_i32
  555. case INDEX_op_clz_i32:
  556. tci_args_rrr(insn, &r0, &r1, &r2);
  557. tmp32 = regs[r1];
  558. regs[r0] = tmp32 ? clz32(tmp32) : regs[r2];
  559. break;
  560. #endif
  561. #if TCG_TARGET_HAS_ctz_i32
  562. case INDEX_op_ctz_i32:
  563. tci_args_rrr(insn, &r0, &r1, &r2);
  564. tmp32 = regs[r1];
  565. regs[r0] = tmp32 ? ctz32(tmp32) : regs[r2];
  566. break;
  567. #endif
  568. #if TCG_TARGET_HAS_ctpop_i32
  569. case INDEX_op_ctpop_i32:
  570. tci_args_rr(insn, &r0, &r1);
  571. regs[r0] = ctpop32(regs[r1]);
  572. break;
  573. #endif
  574. /* Shift/rotate operations (32 bit). */
  575. case INDEX_op_shl_i32:
  576. tci_args_rrr(insn, &r0, &r1, &r2);
  577. regs[r0] = (uint32_t)regs[r1] << (regs[r2] & 31);
  578. break;
  579. case INDEX_op_shr_i32:
  580. tci_args_rrr(insn, &r0, &r1, &r2);
  581. regs[r0] = (uint32_t)regs[r1] >> (regs[r2] & 31);
  582. break;
  583. case INDEX_op_sar_i32:
  584. tci_args_rrr(insn, &r0, &r1, &r2);
  585. regs[r0] = (int32_t)regs[r1] >> (regs[r2] & 31);
  586. break;
  587. #if TCG_TARGET_HAS_rot_i32
  588. case INDEX_op_rotl_i32:
  589. tci_args_rrr(insn, &r0, &r1, &r2);
  590. regs[r0] = rol32(regs[r1], regs[r2] & 31);
  591. break;
  592. case INDEX_op_rotr_i32:
  593. tci_args_rrr(insn, &r0, &r1, &r2);
  594. regs[r0] = ror32(regs[r1], regs[r2] & 31);
  595. break;
  596. #endif
  597. case INDEX_op_deposit_i32:
  598. tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
  599. regs[r0] = deposit32(regs[r1], pos, len, regs[r2]);
  600. break;
  601. case INDEX_op_extract_i32:
  602. tci_args_rrbb(insn, &r0, &r1, &pos, &len);
  603. regs[r0] = extract32(regs[r1], pos, len);
  604. break;
  605. case INDEX_op_sextract_i32:
  606. tci_args_rrbb(insn, &r0, &r1, &pos, &len);
  607. regs[r0] = sextract32(regs[r1], pos, len);
  608. break;
  609. case INDEX_op_brcond_i32:
  610. tci_args_rl(insn, tb_ptr, &r0, &ptr);
  611. if ((uint32_t)regs[r0]) {
  612. tb_ptr = ptr;
  613. }
  614. break;
  615. #if TCG_TARGET_REG_BITS == 32 || TCG_TARGET_HAS_add2_i32
  616. case INDEX_op_add2_i32:
  617. tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
  618. T1 = tci_uint64(regs[r3], regs[r2]);
  619. T2 = tci_uint64(regs[r5], regs[r4]);
  620. tci_write_reg64(regs, r1, r0, T1 + T2);
  621. break;
  622. #endif
  623. #if TCG_TARGET_REG_BITS == 32 || TCG_TARGET_HAS_sub2_i32
  624. case INDEX_op_sub2_i32:
  625. tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
  626. T1 = tci_uint64(regs[r3], regs[r2]);
  627. T2 = tci_uint64(regs[r5], regs[r4]);
  628. tci_write_reg64(regs, r1, r0, T1 - T2);
  629. break;
  630. #endif
  631. #if TCG_TARGET_HAS_mulu2_i32
  632. case INDEX_op_mulu2_i32:
  633. tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
  634. tmp64 = (uint64_t)(uint32_t)regs[r2] * (uint32_t)regs[r3];
  635. tci_write_reg64(regs, r1, r0, tmp64);
  636. break;
  637. #endif
  638. #if TCG_TARGET_HAS_muls2_i32
  639. case INDEX_op_muls2_i32:
  640. tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
  641. tmp64 = (int64_t)(int32_t)regs[r2] * (int32_t)regs[r3];
  642. tci_write_reg64(regs, r1, r0, tmp64);
  643. break;
  644. #endif
  645. #if TCG_TARGET_HAS_ext8s_i32 || TCG_TARGET_HAS_ext8s_i64
  646. CASE_32_64(ext8s)
  647. tci_args_rr(insn, &r0, &r1);
  648. regs[r0] = (int8_t)regs[r1];
  649. break;
  650. #endif
  651. #if TCG_TARGET_HAS_ext16s_i32 || TCG_TARGET_HAS_ext16s_i64 || \
  652. TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
  653. CASE_32_64(ext16s)
  654. tci_args_rr(insn, &r0, &r1);
  655. regs[r0] = (int16_t)regs[r1];
  656. break;
  657. #endif
  658. #if TCG_TARGET_HAS_ext8u_i32 || TCG_TARGET_HAS_ext8u_i64
  659. CASE_32_64(ext8u)
  660. tci_args_rr(insn, &r0, &r1);
  661. regs[r0] = (uint8_t)regs[r1];
  662. break;
  663. #endif
  664. #if TCG_TARGET_HAS_ext16u_i32 || TCG_TARGET_HAS_ext16u_i64
  665. CASE_32_64(ext16u)
  666. tci_args_rr(insn, &r0, &r1);
  667. regs[r0] = (uint16_t)regs[r1];
  668. break;
  669. #endif
  670. #if TCG_TARGET_HAS_bswap16_i32 || TCG_TARGET_HAS_bswap16_i64
  671. CASE_32_64(bswap16)
  672. tci_args_rr(insn, &r0, &r1);
  673. regs[r0] = bswap16(regs[r1]);
  674. break;
  675. #endif
  676. #if TCG_TARGET_HAS_bswap32_i32 || TCG_TARGET_HAS_bswap32_i64
  677. CASE_32_64(bswap32)
  678. tci_args_rr(insn, &r0, &r1);
  679. regs[r0] = bswap32(regs[r1]);
  680. break;
  681. #endif
  682. #if TCG_TARGET_HAS_not_i32 || TCG_TARGET_HAS_not_i64
  683. CASE_32_64(not)
  684. tci_args_rr(insn, &r0, &r1);
  685. regs[r0] = ~regs[r1];
  686. break;
  687. #endif
  688. CASE_32_64(neg)
  689. tci_args_rr(insn, &r0, &r1);
  690. regs[r0] = -regs[r1];
  691. break;
  692. #if TCG_TARGET_REG_BITS == 64
  693. /* Load/store operations (64 bit). */
  694. case INDEX_op_ld32s_i64:
  695. tci_args_rrs(insn, &r0, &r1, &ofs);
  696. ptr = (void *)(regs[r1] + ofs);
  697. regs[r0] = *(int32_t *)ptr;
  698. break;
  699. case INDEX_op_ld_i64:
  700. tci_args_rrs(insn, &r0, &r1, &ofs);
  701. ptr = (void *)(regs[r1] + ofs);
  702. regs[r0] = *(uint64_t *)ptr;
  703. break;
  704. case INDEX_op_st_i64:
  705. tci_args_rrs(insn, &r0, &r1, &ofs);
  706. ptr = (void *)(regs[r1] + ofs);
  707. *(uint64_t *)ptr = regs[r0];
  708. break;
  709. /* Arithmetic operations (64 bit). */
  710. case INDEX_op_div_i64:
  711. tci_args_rrr(insn, &r0, &r1, &r2);
  712. regs[r0] = (int64_t)regs[r1] / (int64_t)regs[r2];
  713. break;
  714. case INDEX_op_divu_i64:
  715. tci_args_rrr(insn, &r0, &r1, &r2);
  716. regs[r0] = (uint64_t)regs[r1] / (uint64_t)regs[r2];
  717. break;
  718. case INDEX_op_rem_i64:
  719. tci_args_rrr(insn, &r0, &r1, &r2);
  720. regs[r0] = (int64_t)regs[r1] % (int64_t)regs[r2];
  721. break;
  722. case INDEX_op_remu_i64:
  723. tci_args_rrr(insn, &r0, &r1, &r2);
  724. regs[r0] = (uint64_t)regs[r1] % (uint64_t)regs[r2];
  725. break;
  726. #if TCG_TARGET_HAS_clz_i64
  727. case INDEX_op_clz_i64:
  728. tci_args_rrr(insn, &r0, &r1, &r2);
  729. regs[r0] = regs[r1] ? clz64(regs[r1]) : regs[r2];
  730. break;
  731. #endif
  732. #if TCG_TARGET_HAS_ctz_i64
  733. case INDEX_op_ctz_i64:
  734. tci_args_rrr(insn, &r0, &r1, &r2);
  735. regs[r0] = regs[r1] ? ctz64(regs[r1]) : regs[r2];
  736. break;
  737. #endif
  738. #if TCG_TARGET_HAS_ctpop_i64
  739. case INDEX_op_ctpop_i64:
  740. tci_args_rr(insn, &r0, &r1);
  741. regs[r0] = ctpop64(regs[r1]);
  742. break;
  743. #endif
  744. #if TCG_TARGET_HAS_mulu2_i64
  745. case INDEX_op_mulu2_i64:
  746. tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
  747. mulu64(&regs[r0], &regs[r1], regs[r2], regs[r3]);
  748. break;
  749. #endif
  750. #if TCG_TARGET_HAS_muls2_i64
  751. case INDEX_op_muls2_i64:
  752. tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
  753. muls64(&regs[r0], &regs[r1], regs[r2], regs[r3]);
  754. break;
  755. #endif
  756. #if TCG_TARGET_HAS_add2_i64
  757. case INDEX_op_add2_i64:
  758. tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
  759. T1 = regs[r2] + regs[r4];
  760. T2 = regs[r3] + regs[r5] + (T1 < regs[r2]);
  761. regs[r0] = T1;
  762. regs[r1] = T2;
  763. break;
  764. #endif
  765. #if TCG_TARGET_HAS_add2_i64
  766. case INDEX_op_sub2_i64:
  767. tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
  768. T1 = regs[r2] - regs[r4];
  769. T2 = regs[r3] - regs[r5] - (regs[r2] < regs[r4]);
  770. regs[r0] = T1;
  771. regs[r1] = T2;
  772. break;
  773. #endif
  774. /* Shift/rotate operations (64 bit). */
  775. case INDEX_op_shl_i64:
  776. tci_args_rrr(insn, &r0, &r1, &r2);
  777. regs[r0] = regs[r1] << (regs[r2] & 63);
  778. break;
  779. case INDEX_op_shr_i64:
  780. tci_args_rrr(insn, &r0, &r1, &r2);
  781. regs[r0] = regs[r1] >> (regs[r2] & 63);
  782. break;
  783. case INDEX_op_sar_i64:
  784. tci_args_rrr(insn, &r0, &r1, &r2);
  785. regs[r0] = (int64_t)regs[r1] >> (regs[r2] & 63);
  786. break;
  787. #if TCG_TARGET_HAS_rot_i64
  788. case INDEX_op_rotl_i64:
  789. tci_args_rrr(insn, &r0, &r1, &r2);
  790. regs[r0] = rol64(regs[r1], regs[r2] & 63);
  791. break;
  792. case INDEX_op_rotr_i64:
  793. tci_args_rrr(insn, &r0, &r1, &r2);
  794. regs[r0] = ror64(regs[r1], regs[r2] & 63);
  795. break;
  796. #endif
  797. case INDEX_op_deposit_i64:
  798. tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
  799. regs[r0] = deposit64(regs[r1], pos, len, regs[r2]);
  800. break;
  801. case INDEX_op_extract_i64:
  802. tci_args_rrbb(insn, &r0, &r1, &pos, &len);
  803. regs[r0] = extract64(regs[r1], pos, len);
  804. break;
  805. case INDEX_op_sextract_i64:
  806. tci_args_rrbb(insn, &r0, &r1, &pos, &len);
  807. regs[r0] = sextract64(regs[r1], pos, len);
  808. break;
  809. case INDEX_op_brcond_i64:
  810. tci_args_rl(insn, tb_ptr, &r0, &ptr);
  811. if (regs[r0]) {
  812. tb_ptr = ptr;
  813. }
  814. break;
  815. case INDEX_op_ext32s_i64:
  816. case INDEX_op_ext_i32_i64:
  817. tci_args_rr(insn, &r0, &r1);
  818. regs[r0] = (int32_t)regs[r1];
  819. break;
  820. case INDEX_op_ext32u_i64:
  821. case INDEX_op_extu_i32_i64:
  822. tci_args_rr(insn, &r0, &r1);
  823. regs[r0] = (uint32_t)regs[r1];
  824. break;
  825. #if TCG_TARGET_HAS_bswap64_i64
  826. case INDEX_op_bswap64_i64:
  827. tci_args_rr(insn, &r0, &r1);
  828. regs[r0] = bswap64(regs[r1]);
  829. break;
  830. #endif
  831. #endif /* TCG_TARGET_REG_BITS == 64 */
  832. /* QEMU specific operations. */
  833. case INDEX_op_exit_tb:
  834. tci_args_l(insn, tb_ptr, &ptr);
  835. return (uintptr_t)ptr;
  836. case INDEX_op_goto_tb:
  837. tci_args_l(insn, tb_ptr, &ptr);
  838. tb_ptr = *(void **)ptr;
  839. break;
  840. case INDEX_op_goto_ptr:
  841. tci_args_r(insn, &r0);
  842. ptr = (void *)regs[r0];
  843. if (!ptr) {
  844. return 0;
  845. }
  846. tb_ptr = ptr;
  847. break;
  848. case INDEX_op_qemu_ld_i32:
  849. tci_args_rrm(insn, &r0, &r1, &oi);
  850. taddr = regs[r1];
  851. regs[r0] = tci_qemu_ld(env, taddr, oi, tb_ptr);
  852. break;
  853. case INDEX_op_qemu_ld_i64:
  854. if (TCG_TARGET_REG_BITS == 64) {
  855. tci_args_rrm(insn, &r0, &r1, &oi);
  856. taddr = regs[r1];
  857. } else {
  858. tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
  859. taddr = regs[r2];
  860. oi = regs[r3];
  861. }
  862. tmp64 = tci_qemu_ld(env, taddr, oi, tb_ptr);
  863. if (TCG_TARGET_REG_BITS == 32) {
  864. tci_write_reg64(regs, r1, r0, tmp64);
  865. } else {
  866. regs[r0] = tmp64;
  867. }
  868. break;
  869. case INDEX_op_qemu_st_i32:
  870. tci_args_rrm(insn, &r0, &r1, &oi);
  871. taddr = regs[r1];
  872. tci_qemu_st(env, taddr, regs[r0], oi, tb_ptr);
  873. break;
  874. case INDEX_op_qemu_st_i64:
  875. if (TCG_TARGET_REG_BITS == 64) {
  876. tci_args_rrm(insn, &r0, &r1, &oi);
  877. tmp64 = regs[r0];
  878. taddr = regs[r1];
  879. } else {
  880. tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
  881. tmp64 = tci_uint64(regs[r1], regs[r0]);
  882. taddr = regs[r2];
  883. oi = regs[r3];
  884. }
  885. tci_qemu_st(env, taddr, tmp64, oi, tb_ptr);
  886. break;
  887. case INDEX_op_mb:
  888. /* Ensure ordering for all kinds */
  889. smp_mb();
  890. break;
  891. default:
  892. g_assert_not_reached();
  893. }
  894. }
  895. }
  896. /*
  897. * Disassembler that matches the interpreter
  898. */
  899. static const char *str_r(TCGReg r)
  900. {
  901. static const char regs[TCG_TARGET_NB_REGS][4] = {
  902. "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r7",
  903. "r8", "r9", "r10", "r11", "r12", "r13", "env", "sp"
  904. };
  905. QEMU_BUILD_BUG_ON(TCG_AREG0 != TCG_REG_R14);
  906. QEMU_BUILD_BUG_ON(TCG_REG_CALL_STACK != TCG_REG_R15);
  907. assert((unsigned)r < TCG_TARGET_NB_REGS);
  908. return regs[r];
  909. }
  910. static const char *str_c(TCGCond c)
  911. {
  912. static const char cond[16][8] = {
  913. [TCG_COND_NEVER] = "never",
  914. [TCG_COND_ALWAYS] = "always",
  915. [TCG_COND_EQ] = "eq",
  916. [TCG_COND_NE] = "ne",
  917. [TCG_COND_LT] = "lt",
  918. [TCG_COND_GE] = "ge",
  919. [TCG_COND_LE] = "le",
  920. [TCG_COND_GT] = "gt",
  921. [TCG_COND_LTU] = "ltu",
  922. [TCG_COND_GEU] = "geu",
  923. [TCG_COND_LEU] = "leu",
  924. [TCG_COND_GTU] = "gtu",
  925. [TCG_COND_TSTEQ] = "tsteq",
  926. [TCG_COND_TSTNE] = "tstne",
  927. };
  928. assert((unsigned)c < ARRAY_SIZE(cond));
  929. assert(cond[c][0] != 0);
  930. return cond[c];
  931. }
/* Disassemble TCI bytecode. */
/*
 * Print one TCI instruction to info->stream via info->fprintf_func.
 *
 * @addr: host address of the 32-bit TCI instruction word.
 * @info: BFD disassemble_info supplying the output callbacks.
 *
 * Returns sizeof(uint32_t): only the primary instruction word is
 * counted, even for opcodes whose decoder consumes extra immediate
 * words through tb_ptr.
 *
 * The case groups below are keyed by operand encoding (the
 * tci_args_* decoder used), mirroring the interpreter's decoding.
 */
int print_insn_tci(bfd_vma addr, disassemble_info *info)
{
    const uint32_t *tb_ptr = (const void *)(uintptr_t)addr;
    const TCGOpDef *def;
    const char *op_name;
    uint32_t insn;
    TCGOpcode op;
    TCGReg r0, r1, r2, r3, r4, r5;
    tcg_target_ulong i1;
    int32_t s2;
    TCGCond c;
    MemOpIdx oi;
    uint8_t pos, len;
    void *ptr;

    /* TCI is always the host, so we don't need to load indirect. */
    insn = *tb_ptr++;

    /* Echo the raw instruction word before the mnemonic. */
    info->fprintf_func(info->stream, "%08x ", insn);

    op = extract32(insn, 0, 8);
    def = &tcg_op_defs[op];
    op_name = def->name;

    switch (op) {
    /* Opcodes carrying a single code/label address operand. */
    case INDEX_op_br:
    case INDEX_op_exit_tb:
    case INDEX_op_goto_tb:
        tci_args_l(insn, tb_ptr, &ptr);
        info->fprintf_func(info->stream, "%-12s %p", op_name, ptr);
        break;
    case INDEX_op_goto_ptr:
        tci_args_r(insn, &r0);
        info->fprintf_func(info->stream, "%-12s %s", op_name, str_r(r0));
        break;
    case INDEX_op_call:
        tci_args_nl(insn, tb_ptr, &len, &ptr);
        info->fprintf_func(info->stream, "%-12s %d, %p", op_name, len, ptr);
        break;
    /*
     * TCI lowers brcond to a compare-to-zero: the interpreter tests
     * regs[r0] != 0, hence the fixed "0, ne" operands printed here.
     */
    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        tci_args_rl(insn, tb_ptr, &r0, &ptr);
        info->fprintf_func(info->stream, "%-12s %s, 0, ne, %p",
                           op_name, str_r(r0), ptr);
        break;
    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
        tci_args_rrrc(insn, &r0, &r1, &r2, &c);
        info->fprintf_func(info->stream, "%-12s %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2), str_c(c));
        break;
    /* Register + immediate constant. */
    case INDEX_op_tci_movi:
        tci_args_ri(insn, &r0, &i1);
        info->fprintf_func(info->stream, "%-12s %s, 0x%" TCG_PRIlx,
                           op_name, str_r(r0), i1);
        break;
    /* Register + pointer loaded from the following word. */
    case INDEX_op_tci_movl:
        tci_args_rl(insn, tb_ptr, &r0, &ptr);
        info->fprintf_func(info->stream, "%-12s %s, %p",
                           op_name, str_r(r0), ptr);
        break;
    /* Host loads/stores: value reg, base reg, signed offset. */
    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld_i32:
    case INDEX_op_ld_i64:
    case INDEX_op_st8_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i32:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i32:
    case INDEX_op_st_i64:
        tci_args_rrs(insn, &r0, &r1, &s2);
        info->fprintf_func(info->stream, "%-12s %s, %s, %d",
                           op_name, str_r(r0), str_r(r1), s2);
        break;
    /* Unary ops and moves: two register operands. */
    case INDEX_op_mov_i32:
    case INDEX_op_mov_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext32s_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_bswap16_i32:
    case INDEX_op_bswap16_i64:
    case INDEX_op_bswap32_i32:
    case INDEX_op_bswap32_i64:
    case INDEX_op_bswap64_i64:
    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i32:
    case INDEX_op_neg_i64:
    case INDEX_op_ctpop_i32:
    case INDEX_op_ctpop_i64:
        tci_args_rr(insn, &r0, &r1);
        info->fprintf_func(info->stream, "%-12s %s, %s",
                           op_name, str_r(r0), str_r(r1));
        break;
    /* Binary ops: three register operands. */
    case INDEX_op_add_i32:
    case INDEX_op_add_i64:
    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
    case INDEX_op_mul_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
    case INDEX_op_andc_i32:
    case INDEX_op_andc_i64:
    case INDEX_op_orc_i32:
    case INDEX_op_orc_i64:
    case INDEX_op_eqv_i32:
    case INDEX_op_eqv_i64:
    case INDEX_op_nand_i32:
    case INDEX_op_nand_i64:
    case INDEX_op_nor_i32:
    case INDEX_op_nor_i64:
    case INDEX_op_div_i32:
    case INDEX_op_div_i64:
    case INDEX_op_rem_i32:
    case INDEX_op_rem_i64:
    case INDEX_op_divu_i32:
    case INDEX_op_divu_i64:
    case INDEX_op_remu_i32:
    case INDEX_op_remu_i64:
    case INDEX_op_shl_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i32:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i32:
    case INDEX_op_sar_i64:
    case INDEX_op_rotl_i32:
    case INDEX_op_rotl_i64:
    case INDEX_op_rotr_i32:
    case INDEX_op_rotr_i64:
    case INDEX_op_clz_i32:
    case INDEX_op_clz_i64:
    case INDEX_op_ctz_i32:
    case INDEX_op_ctz_i64:
        tci_args_rrr(insn, &r0, &r1, &r2);
        info->fprintf_func(info->stream, "%-12s %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2));
        break;
    /* Three registers plus bit-field position and length. */
    case INDEX_op_deposit_i32:
    case INDEX_op_deposit_i64:
        tci_args_rrrbb(insn, &r0, &r1, &r2, &pos, &len);
        info->fprintf_func(info->stream, "%-12s %s, %s, %s, %d, %d",
                           op_name, str_r(r0), str_r(r1), str_r(r2), pos, len);
        break;
    /* Two registers plus bit-field position and length. */
    case INDEX_op_extract_i32:
    case INDEX_op_extract_i64:
    case INDEX_op_sextract_i32:
    case INDEX_op_sextract_i64:
        tci_args_rrbb(insn, &r0, &r1, &pos, &len);
        info->fprintf_func(info->stream, "%-12s %s,%s,%d,%d",
                           op_name, str_r(r0), str_r(r1), pos, len);
        break;
    /* Five registers plus a condition. */
    case INDEX_op_movcond_i32:
    case INDEX_op_movcond_i64:
    case INDEX_op_setcond2_i32:
        tci_args_rrrrrc(insn, &r0, &r1, &r2, &r3, &r4, &c);
        info->fprintf_func(info->stream, "%-12s %s, %s, %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2),
                           str_r(r3), str_r(r4), str_c(c));
        break;
    /* Double-width multiply: two outputs, two inputs. */
    case INDEX_op_mulu2_i32:
    case INDEX_op_mulu2_i64:
    case INDEX_op_muls2_i32:
    case INDEX_op_muls2_i64:
        tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
        info->fprintf_func(info->stream, "%-12s %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1),
                           str_r(r2), str_r(r3));
        break;
    /* Double-word add/sub with carry: six register operands. */
    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        tci_args_rrrrrr(insn, &r0, &r1, &r2, &r3, &r4, &r5);
        info->fprintf_func(info->stream, "%-12s %s, %s, %s, %s, %s, %s",
                           op_name, str_r(r0), str_r(r1), str_r(r2),
                           str_r(r3), str_r(r4), str_r(r5));
        break;
    /*
     * Guest memory access.  On 32-bit hosts the 64-bit forms use a
     * four-register encoding (value pair / address / MemOpIdx in a
     * register); otherwise they share the rrm encoding of the
     * 32-bit forms below.
     */
    case INDEX_op_qemu_ld_i64:
    case INDEX_op_qemu_st_i64:
        if (TCG_TARGET_REG_BITS == 32) {
            tci_args_rrrr(insn, &r0, &r1, &r2, &r3);
            info->fprintf_func(info->stream, "%-12s %s, %s, %s, %s",
                               op_name, str_r(r0), str_r(r1),
                               str_r(r2), str_r(r3));
            break;
        }
        /* fall through */
    case INDEX_op_qemu_ld_i32:
    case INDEX_op_qemu_st_i32:
        tci_args_rrm(insn, &r0, &r1, &oi);
        info->fprintf_func(info->stream, "%-12s %s, %s, %x",
                           op_name, str_r(r0), str_r(r1), oi);
        break;
    case 0:
        /* tcg_out_nop_fill uses zeros */
        if (insn == 0) {
            info->fprintf_func(info->stream, "align");
            break;
        }
        /* fall through */
    default:
        info->fprintf_func(info->stream, "illegal opcode %d", op);
        break;
    }

    return sizeof(insn);
}