/*
 * Tiny Code Generator for QEMU
 *
 * Copyright (c) 2018 SiFive, Inc
 * Copyright (c) 2008-2009 Arnaud Patard <arnaud.patard@rtp-net.org>
 * Copyright (c) 2009 Aurelien Jarno <aurelien@aurel32.net>
 * Copyright (c) 2008 Fabrice Bellard
 *
 * Based on i386/tcg-target.c and mips/tcg-target.c
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

#include "../tcg-pool.c.inc"

#ifdef CONFIG_DEBUG_TCG
static const char * const tcg_target_reg_names[TCG_TARGET_NB_REGS] = {
    "zero",
    "ra",
    "sp",
    "gp",
    "tp",
    "t0",
    "t1",
    "t2",
    "s0",
    "s1",
    "a0",
    "a1",
    "a2",
    "a3",
    "a4",
    "a5",
    "a6",
    "a7",
    "s2",
    "s3",
    "s4",
    "s5",
    "s6",
    "s7",
    "s8",
    "s9",
    "s10",
    "s11",
    "t3",
    "t4",
    "t5",
    "t6"
};
#endif
static const int tcg_target_reg_alloc_order[] = {
    /* Call saved registers */
    /* TCG_REG_S0 reserved for TCG_AREG0 */
    TCG_REG_S1,
    TCG_REG_S2,
    TCG_REG_S3,
    TCG_REG_S4,
    TCG_REG_S5,
    TCG_REG_S6,
    TCG_REG_S7,
    TCG_REG_S8,
    TCG_REG_S9,
    TCG_REG_S10,
    TCG_REG_S11,

    /* Call clobbered registers */
    TCG_REG_T0,
    TCG_REG_T1,
    TCG_REG_T2,
    TCG_REG_T3,
    TCG_REG_T4,
    TCG_REG_T5,
    TCG_REG_T6,

    /* Argument registers */
    TCG_REG_A0,
    TCG_REG_A1,
    TCG_REG_A2,
    TCG_REG_A3,
    TCG_REG_A4,
    TCG_REG_A5,
    TCG_REG_A6,
    TCG_REG_A7,
};

static const int tcg_target_call_iarg_regs[] = {
    TCG_REG_A0,
    TCG_REG_A1,
    TCG_REG_A2,
    TCG_REG_A3,
    TCG_REG_A4,
    TCG_REG_A5,
    TCG_REG_A6,
    TCG_REG_A7,
};

static const int tcg_target_call_oarg_regs[] = {
    TCG_REG_A0,
    TCG_REG_A1,
};
#define TCG_CT_CONST_ZERO  0x100
#define TCG_CT_CONST_S12   0x200
#define TCG_CT_CONST_N12   0x400
#define TCG_CT_CONST_M12   0x800

static inline tcg_target_long sextreg(tcg_target_long val, int pos, int len)
{
    if (TCG_TARGET_REG_BITS == 32) {
        return sextract32(val, pos, len);
    } else {
        return sextract64(val, pos, len);
    }
}

/* parse target specific constraints */
static const char *target_parse_constraint(TCGArgConstraint *ct,
                                           const char *ct_str, TCGType type)
{
    switch (*ct_str++) {
    case 'r':
        ct->ct |= TCG_CT_REG;
        ct->u.regs = 0xffffffff;
        break;
    case 'L':
        /* qemu_ld/qemu_st constraint */
        ct->ct |= TCG_CT_REG;
        ct->u.regs = 0xffffffff;
        /* qemu_ld/qemu_st uses TCG_REG_TMP0 */
#if defined(CONFIG_SOFTMMU)
        tcg_regset_reset_reg(ct->u.regs, tcg_target_call_iarg_regs[0]);
        tcg_regset_reset_reg(ct->u.regs, tcg_target_call_iarg_regs[1]);
        tcg_regset_reset_reg(ct->u.regs, tcg_target_call_iarg_regs[2]);
        tcg_regset_reset_reg(ct->u.regs, tcg_target_call_iarg_regs[3]);
        tcg_regset_reset_reg(ct->u.regs, tcg_target_call_iarg_regs[4]);
#endif
        break;
    case 'I':
        ct->ct |= TCG_CT_CONST_S12;
        break;
    case 'N':
        ct->ct |= TCG_CT_CONST_N12;
        break;
    case 'M':
        ct->ct |= TCG_CT_CONST_M12;
        break;
    case 'Z':
        /* we can use a zero immediate as a zero register argument. */
        ct->ct |= TCG_CT_CONST_ZERO;
        break;
    default:
        return NULL;
    }
    return ct_str;
}

/* test if a constant matches the constraint */
static int tcg_target_const_match(tcg_target_long val, TCGType type,
                                  const TCGArgConstraint *arg_ct)
{
    int ct = arg_ct->ct;
    if (ct & TCG_CT_CONST) {
        return 1;
    }
    if ((ct & TCG_CT_CONST_ZERO) && val == 0) {
        return 1;
    }
    if ((ct & TCG_CT_CONST_S12) && val == sextreg(val, 0, 12)) {
        return 1;
    }
    if ((ct & TCG_CT_CONST_N12) && -val == sextreg(-val, 0, 12)) {
        return 1;
    }
    if ((ct & TCG_CT_CONST_M12) && val >= -0xfff && val <= 0xfff) {
        return 1;
    }
    return 0;
}
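
/*
 * Notes on the constant constraints above, as selected by the 'I', 'N'
 * and 'M' letters:
 *   S12 ('I'): val itself fits a signed 12-bit immediate (ADDI, ANDI, ...);
 *   N12 ('N'): -val fits s12, so a subtract may be emitted as ADDI rd, rs, -val;
 *   M12 ('M'): used for add2/sub2 operands, where tcg_out_addsub2 may emit
 *              either the constant or its negation, hence the symmetric range.
 */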
/*
 * RISC-V Base ISA opcodes (IM)
 */

typedef enum {
    OPC_ADD = 0x33,
    OPC_ADDI = 0x13,
    OPC_AND = 0x7033,
    OPC_ANDI = 0x7013,
    OPC_AUIPC = 0x17,
    OPC_BEQ = 0x63,
    OPC_BGE = 0x5063,
    OPC_BGEU = 0x7063,
    OPC_BLT = 0x4063,
    OPC_BLTU = 0x6063,
    OPC_BNE = 0x1063,
    OPC_DIV = 0x2004033,
    OPC_DIVU = 0x2005033,
    OPC_JAL = 0x6f,
    OPC_JALR = 0x67,
    OPC_LB = 0x3,
    OPC_LBU = 0x4003,
    OPC_LD = 0x3003,
    OPC_LH = 0x1003,
    OPC_LHU = 0x5003,
    OPC_LUI = 0x37,
    OPC_LW = 0x2003,
    OPC_LWU = 0x6003,
    OPC_MUL = 0x2000033,
    OPC_MULH = 0x2001033,
    OPC_MULHSU = 0x2002033,
    OPC_MULHU = 0x2003033,
    OPC_OR = 0x6033,
    OPC_ORI = 0x6013,
    OPC_REM = 0x2006033,
    OPC_REMU = 0x2007033,
    OPC_SB = 0x23,
    OPC_SD = 0x3023,
    OPC_SH = 0x1023,
    OPC_SLL = 0x1033,
    OPC_SLLI = 0x1013,
    OPC_SLT = 0x2033,
    OPC_SLTI = 0x2013,
    OPC_SLTIU = 0x3013,
    OPC_SLTU = 0x3033,
    OPC_SRA = 0x40005033,
    OPC_SRAI = 0x40005013,
    OPC_SRL = 0x5033,
    OPC_SRLI = 0x5013,
    OPC_SUB = 0x40000033,
    OPC_SW = 0x2023,
    OPC_XOR = 0x4033,
    OPC_XORI = 0x4013,

#if TCG_TARGET_REG_BITS == 64
    OPC_ADDIW = 0x1b,
    OPC_ADDW = 0x3b,
    OPC_DIVUW = 0x200503b,
    OPC_DIVW = 0x200403b,
    OPC_MULW = 0x200003b,
    OPC_REMUW = 0x200703b,
    OPC_REMW = 0x200603b,
    OPC_SLLIW = 0x101b,
    OPC_SLLW = 0x103b,
    OPC_SRAIW = 0x4000501b,
    OPC_SRAW = 0x4000503b,
    OPC_SRLIW = 0x501b,
    OPC_SRLW = 0x503b,
    OPC_SUBW = 0x4000003b,
#else
    /* Simplify code throughout by defining aliases for RV32. */
    OPC_ADDIW = OPC_ADDI,
    OPC_ADDW = OPC_ADD,
    OPC_DIVUW = OPC_DIVU,
    OPC_DIVW = OPC_DIV,
    OPC_MULW = OPC_MUL,
    OPC_REMUW = OPC_REMU,
    OPC_REMW = OPC_REM,
    OPC_SLLIW = OPC_SLLI,
    OPC_SLLW = OPC_SLL,
    OPC_SRAIW = OPC_SRAI,
    OPC_SRAW = OPC_SRA,
    OPC_SRLIW = OPC_SRLI,
    OPC_SRLW = OPC_SRL,
    OPC_SUBW = OPC_SUB,
#endif

    OPC_FENCE = 0x0000000f,
} RISCVInsn;
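
/*
 * Each RISCVInsn value above is the instruction word with the register and
 * immediate fields zeroed: the opcode sits in bits [6:0], funct3 in [14:12]
 * and, for R-type instructions, funct7 in [31:25].  For example,
 * OPC_SRAI = 0x40005013 decomposes into funct7 0x20 << 25 | funct3 5 << 12
 * | opcode 0x13.
 */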
/*
 * RISC-V immediate and instruction encoders (excludes 16-bit RVC)
 */

/* Type-R */

static int32_t encode_r(RISCVInsn opc, TCGReg rd, TCGReg rs1, TCGReg rs2)
{
    return opc | (rd & 0x1f) << 7 | (rs1 & 0x1f) << 15 | (rs2 & 0x1f) << 20;
}

/* Type-I */

static int32_t encode_imm12(uint32_t imm)
{
    return (imm & 0xfff) << 20;
}

static int32_t encode_i(RISCVInsn opc, TCGReg rd, TCGReg rs1, uint32_t imm)
{
    return opc | (rd & 0x1f) << 7 | (rs1 & 0x1f) << 15 | encode_imm12(imm);
}

/* Type-S */

static int32_t encode_simm12(uint32_t imm)
{
    int32_t ret = 0;

    ret |= (imm & 0xFE0) << 20;
    ret |= (imm & 0x1F) << 7;

    return ret;
}

static int32_t encode_s(RISCVInsn opc, TCGReg rs1, TCGReg rs2, uint32_t imm)
{
    return opc | (rs1 & 0x1f) << 15 | (rs2 & 0x1f) << 20 | encode_simm12(imm);
}

/* Type-SB */

static int32_t encode_sbimm12(uint32_t imm)
{
    int32_t ret = 0;

    ret |= (imm & 0x1000) << 19;
    ret |= (imm & 0x7e0) << 20;
    ret |= (imm & 0x1e) << 7;
    ret |= (imm & 0x800) >> 4;

    return ret;
}

static int32_t encode_sb(RISCVInsn opc, TCGReg rs1, TCGReg rs2, uint32_t imm)
{
    return opc | (rs1 & 0x1f) << 15 | (rs2 & 0x1f) << 20 | encode_sbimm12(imm);
}

/* Type-U */

static int32_t encode_uimm20(uint32_t imm)
{
    return imm & 0xfffff000;
}

static int32_t encode_u(RISCVInsn opc, TCGReg rd, uint32_t imm)
{
    return opc | (rd & 0x1f) << 7 | encode_uimm20(imm);
}

/* Type-UJ */

static int32_t encode_ujimm20(uint32_t imm)
{
    int32_t ret = 0;

    ret |= (imm & 0x0007fe) << (21 - 1);
    ret |= (imm & 0x000800) << (20 - 11);
    ret |= (imm & 0x0ff000) << (12 - 12);
    ret |= (imm & 0x100000) << (31 - 20);

    return ret;
}

static int32_t encode_uj(RISCVInsn opc, TCGReg rd, uint32_t imm)
{
    return opc | (rd & 0x1f) << 7 | encode_ujimm20(imm);
}
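
/*
 * A quick sanity check of the encoders above:
 * encode_i(OPC_ADDI, TCG_REG_A0, TCG_REG_ZERO, 1) yields
 * 0x13 | (10 << 7) | (1 << 20) = 0x00100513, the standard encoding of
 * "addi a0, zero, 1", i.e. "li a0, 1".
 */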
/*
 * RISC-V instruction emitters
 */

static void tcg_out_opc_reg(TCGContext *s, RISCVInsn opc,
                            TCGReg rd, TCGReg rs1, TCGReg rs2)
{
    tcg_out32(s, encode_r(opc, rd, rs1, rs2));
}

static void tcg_out_opc_imm(TCGContext *s, RISCVInsn opc,
                            TCGReg rd, TCGReg rs1, TCGArg imm)
{
    tcg_out32(s, encode_i(opc, rd, rs1, imm));
}

static void tcg_out_opc_store(TCGContext *s, RISCVInsn opc,
                              TCGReg rs1, TCGReg rs2, uint32_t imm)
{
    tcg_out32(s, encode_s(opc, rs1, rs2, imm));
}

static void tcg_out_opc_branch(TCGContext *s, RISCVInsn opc,
                               TCGReg rs1, TCGReg rs2, uint32_t imm)
{
    tcg_out32(s, encode_sb(opc, rs1, rs2, imm));
}

static void tcg_out_opc_upper(TCGContext *s, RISCVInsn opc,
                              TCGReg rd, uint32_t imm)
{
    tcg_out32(s, encode_u(opc, rd, imm));
}

static void tcg_out_opc_jump(TCGContext *s, RISCVInsn opc,
                             TCGReg rd, uint32_t imm)
{
    tcg_out32(s, encode_uj(opc, rd, imm));
}

static void tcg_out_nop_fill(tcg_insn_unit *p, int count)
{
    int i;
    for (i = 0; i < count; ++i) {
        p[i] = encode_i(OPC_ADDI, TCG_REG_ZERO, TCG_REG_ZERO, 0);
    }
}
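
/*
 * The filler emitted above, ADDI zero, zero, 0 (0x00000013), is the
 * canonical RISC-V NOP.
 */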
/*
 * Relocations
 */

static bool reloc_sbimm12(tcg_insn_unit *code_ptr, tcg_insn_unit *target)
{
    intptr_t offset = (intptr_t)target - (intptr_t)code_ptr;

    if (offset == sextreg(offset, 1, 12) << 1) {
        code_ptr[0] |= encode_sbimm12(offset);
        return true;
    }

    return false;
}

static bool reloc_jimm20(tcg_insn_unit *code_ptr, tcg_insn_unit *target)
{
    intptr_t offset = (intptr_t)target - (intptr_t)code_ptr;

    if (offset == sextreg(offset, 1, 20) << 1) {
        code_ptr[0] |= encode_ujimm20(offset);
        return true;
    }

    return false;
}

static bool reloc_call(tcg_insn_unit *code_ptr, tcg_insn_unit *target)
{
    intptr_t offset = (intptr_t)target - (intptr_t)code_ptr;
    int32_t lo = sextreg(offset, 0, 12);
    int32_t hi = offset - lo;

    if (offset == hi + lo) {
        code_ptr[0] |= encode_uimm20(hi);
        code_ptr[1] |= encode_imm12(lo);
        return true;
    }

    return false;
}
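
/*
 * reloc_call patches an AUIPC + ADDI (or AUIPC + JALR) pair.  Since the low
 * instruction sign-extends its 12-bit immediate, the high part must be
 * biased to compensate: lo = sext12(offset), hi = offset - lo.  For example,
 * offset 0x1800 gives lo = -0x800 and hi = 0x2000, and
 * pc + 0x2000 - 0x800 == pc + 0x1800.
 */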
static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
                        intptr_t value, intptr_t addend)
{
    uint32_t insn = *code_ptr;
    intptr_t diff;
    bool short_jmp;

    tcg_debug_assert(addend == 0);

    switch (type) {
    case R_RISCV_BRANCH:
        diff = value - (uintptr_t)code_ptr;
        short_jmp = diff == sextreg(diff, 0, 12);
        if (short_jmp) {
            return reloc_sbimm12(code_ptr, (tcg_insn_unit *)value);
        } else {
            /* Invert the condition */
            insn = insn ^ (1 << 12);
            /* Clear the offset */
            insn &= 0x01fff07f;
            /* Set the offset to the PC + 8 */
            insn |= encode_sbimm12(8);

            /* Move forward */
            code_ptr[0] = insn;

            /* Overwrite the NOP with jal x0,value */
            diff = value - (uintptr_t)(code_ptr + 1);
            insn = encode_uj(OPC_JAL, TCG_REG_ZERO, diff);
            code_ptr[1] = insn;

            return true;
        }
        break;
    case R_RISCV_JAL:
        return reloc_jimm20(code_ptr, (tcg_insn_unit *)value);
    case R_RISCV_CALL:
        return reloc_call(code_ptr, (tcg_insn_unit *)value);
    default:
        tcg_abort();
    }
}
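
/*
 * Note on the R_RISCV_BRANCH patching above: funct3 lives in bits [14:12]
 * and its low bit distinguishes BEQ/BNE, BLT/BGE and BLTU/BGEU, so XORing
 * the instruction word with (1 << 12) inverts the condition.  The inverted
 * branch then skips over a JAL written into the NOP slot that
 * tcg_out_brcond reserves after every unresolved branch.
 */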
/*
 * TCG intrinsics
 */

static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    if (ret == arg) {
        return true;
    }
    switch (type) {
    case TCG_TYPE_I32:
    case TCG_TYPE_I64:
        tcg_out_opc_imm(s, OPC_ADDI, ret, arg, 0);
        break;
    default:
        g_assert_not_reached();
    }
    return true;
}

static void tcg_out_movi(TCGContext *s, TCGType type, TCGReg rd,
                         tcg_target_long val)
{
    tcg_target_long lo, hi, tmp;
    int shift, ret;

    if (TCG_TARGET_REG_BITS == 64 && type == TCG_TYPE_I32) {
        val = (int32_t)val;
    }

    lo = sextreg(val, 0, 12);
    if (val == lo) {
        tcg_out_opc_imm(s, OPC_ADDI, rd, TCG_REG_ZERO, lo);
        return;
    }

    hi = val - lo;
    if (TCG_TARGET_REG_BITS == 32 || val == (int32_t)val) {
        tcg_out_opc_upper(s, OPC_LUI, rd, hi);
        if (lo != 0) {
            tcg_out_opc_imm(s, OPC_ADDIW, rd, rd, lo);
        }
        return;
    }

    /* We can only be here if TCG_TARGET_REG_BITS != 32 */
    tmp = tcg_pcrel_diff(s, (void *)val);
    if (tmp == (int32_t)tmp) {
        tcg_out_opc_upper(s, OPC_AUIPC, rd, 0);
        tcg_out_opc_imm(s, OPC_ADDI, rd, rd, 0);
        ret = reloc_call(s->code_ptr - 2, (tcg_insn_unit *)val);
        tcg_debug_assert(ret == true);
        return;
    }

    /* Look for a single 20-bit section. */
    shift = ctz64(val);
    tmp = val >> shift;
    if (tmp == sextreg(tmp, 0, 20)) {
        tcg_out_opc_upper(s, OPC_LUI, rd, tmp << 12);
        if (shift > 12) {
            tcg_out_opc_imm(s, OPC_SLLI, rd, rd, shift - 12);
        } else {
            tcg_out_opc_imm(s, OPC_SRAI, rd, rd, 12 - shift);
        }
        return;
    }

    /* Look for a few high zero bits, with lots of bits set in the middle. */
    shift = clz64(val);
    tmp = val << shift;
    if (tmp == sextreg(tmp, 12, 20) << 12) {
        tcg_out_opc_upper(s, OPC_LUI, rd, tmp);
        tcg_out_opc_imm(s, OPC_SRLI, rd, rd, shift);
        return;
    } else if (tmp == sextreg(tmp, 0, 12)) {
        tcg_out_opc_imm(s, OPC_ADDI, rd, TCG_REG_ZERO, tmp);
        tcg_out_opc_imm(s, OPC_SRLI, rd, rd, shift);
        return;
    }

    /* Drop into the constant pool. */
    new_pool_label(s, val, R_RISCV_CALL, s->code_ptr, 0);
    tcg_out_opc_upper(s, OPC_AUIPC, rd, 0);
    tcg_out_opc_imm(s, OPC_LD, rd, rd, 0);
}
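
/*
 * Examples of the tcg_out_movi strategies above: 0x12345678 splits into
 * lo = 0x678 and hi = 0x12345000 and becomes LUI + ADDIW; a value such as
 * 1ull << 36 has ctz = 36, so tmp = 1 fits in 20 bits and it becomes
 * LUI rd, 1 << 12 followed by SLLI rd, rd, 24.
 */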
static void tcg_out_ext8u(TCGContext *s, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_imm(s, OPC_ANDI, ret, arg, 0xff);
}

static void tcg_out_ext16u(TCGContext *s, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_imm(s, OPC_SLLIW, ret, arg, 16);
    tcg_out_opc_imm(s, OPC_SRLIW, ret, ret, 16);
}

static void tcg_out_ext32u(TCGContext *s, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_imm(s, OPC_SLLI, ret, arg, 32);
    tcg_out_opc_imm(s, OPC_SRLI, ret, ret, 32);
}

static void tcg_out_ext8s(TCGContext *s, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_imm(s, OPC_SLLIW, ret, arg, 24);
    tcg_out_opc_imm(s, OPC_SRAIW, ret, ret, 24);
}

static void tcg_out_ext16s(TCGContext *s, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_imm(s, OPC_SLLIW, ret, arg, 16);
    tcg_out_opc_imm(s, OPC_SRAIW, ret, ret, 16);
}

static void tcg_out_ext32s(TCGContext *s, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_imm(s, OPC_ADDIW, ret, arg, 0);
}
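
/*
 * The shift pairs above are needed because the base RISC-V ISA has no
 * byte/halfword extension instructions (those only arrive with the Zbb
 * extension); only ANDI 0xff (ext8u) and ADDIW 0 (ext32s) are single
 * instructions here.
 */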
static void tcg_out_ldst(TCGContext *s, RISCVInsn opc, TCGReg data,
                         TCGReg addr, intptr_t offset)
{
    intptr_t imm12 = sextreg(offset, 0, 12);

    if (offset != imm12) {
        intptr_t diff = offset - (uintptr_t)s->code_ptr;

        if (addr == TCG_REG_ZERO && diff == (int32_t)diff) {
            imm12 = sextreg(diff, 0, 12);
            tcg_out_opc_upper(s, OPC_AUIPC, TCG_REG_TMP2, diff - imm12);
        } else {
            tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP2, offset - imm12);
            if (addr != TCG_REG_ZERO) {
                tcg_out_opc_reg(s, OPC_ADD, TCG_REG_TMP2, TCG_REG_TMP2, addr);
            }
        }
        addr = TCG_REG_TMP2;
    }

    switch (opc) {
    case OPC_SB:
    case OPC_SH:
    case OPC_SW:
    case OPC_SD:
        tcg_out_opc_store(s, opc, addr, data, imm12);
        break;
    case OPC_LB:
    case OPC_LBU:
    case OPC_LH:
    case OPC_LHU:
    case OPC_LW:
    case OPC_LWU:
    case OPC_LD:
        tcg_out_opc_imm(s, opc, data, addr, imm12);
        break;
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg arg,
                       TCGReg arg1, intptr_t arg2)
{
    bool is32bit = (TCG_TARGET_REG_BITS == 32 || type == TCG_TYPE_I32);
    tcg_out_ldst(s, is32bit ? OPC_LW : OPC_LD, arg, arg1, arg2);
}

static void tcg_out_st(TCGContext *s, TCGType type, TCGReg arg,
                       TCGReg arg1, intptr_t arg2)
{
    bool is32bit = (TCG_TARGET_REG_BITS == 32 || type == TCG_TYPE_I32);
    tcg_out_ldst(s, is32bit ? OPC_SW : OPC_SD, arg, arg1, arg2);
}

static bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
                        TCGReg base, intptr_t ofs)
{
    if (val == 0) {
        tcg_out_st(s, type, TCG_REG_ZERO, base, ofs);
        return true;
    }
    return false;
}
static void tcg_out_addsub2(TCGContext *s,
                            TCGReg rl, TCGReg rh,
                            TCGReg al, TCGReg ah,
                            TCGArg bl, TCGArg bh,
                            bool cbl, bool cbh, bool is_sub, bool is32bit)
{
    const RISCVInsn opc_add = is32bit ? OPC_ADDW : OPC_ADD;
    const RISCVInsn opc_addi = is32bit ? OPC_ADDIW : OPC_ADDI;
    const RISCVInsn opc_sub = is32bit ? OPC_SUBW : OPC_SUB;
    TCGReg th = TCG_REG_TMP1;

    /* If we have a negative constant such that negating it would
       make the high part zero, we can (usually) eliminate one insn. */
    if (cbl && cbh && bh == -1 && bl != 0) {
        bl = -bl;
        bh = 0;
        is_sub = !is_sub;
    }

    /* By operating on the high part first, we get to use the final
       carry operation to move back from the temporary. */
    if (!cbh) {
        tcg_out_opc_reg(s, (is_sub ? opc_sub : opc_add), th, ah, bh);
    } else if (bh != 0 || ah == rl) {
        tcg_out_opc_imm(s, opc_addi, th, ah, (is_sub ? -bh : bh));
    } else {
        th = ah;
    }

    /* Note that tcg optimization should eliminate the bl == 0 case. */
    if (is_sub) {
        if (cbl) {
            tcg_out_opc_imm(s, OPC_SLTIU, TCG_REG_TMP0, al, bl);
            tcg_out_opc_imm(s, opc_addi, rl, al, -bl);
        } else {
            tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_TMP0, al, bl);
            tcg_out_opc_reg(s, opc_sub, rl, al, bl);
        }
        tcg_out_opc_reg(s, opc_sub, rh, th, TCG_REG_TMP0);
    } else {
        if (cbl) {
            tcg_out_opc_imm(s, opc_addi, rl, al, bl);
            tcg_out_opc_imm(s, OPC_SLTIU, TCG_REG_TMP0, rl, bl);
        } else if (rl == al && rl == bl) {
            tcg_out_opc_imm(s, OPC_SLTI, TCG_REG_TMP0, al, 0);
            tcg_out_opc_reg(s, opc_addi, rl, al, bl);
        } else {
            tcg_out_opc_reg(s, opc_add, rl, al, bl);
            tcg_out_opc_reg(s, OPC_SLTU, TCG_REG_TMP0,
                            rl, (rl == bl ? al : bl));
        }
        tcg_out_opc_reg(s, opc_add, rh, th, TCG_REG_TMP0);
    }
}
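
/*
 * The carry/borrow computation above is the classic SLTU trick: for an
 * addition rl = al + bl, unsigned overflow occurred iff rl < bl (or al),
 * and for a subtraction the borrow is al < bl.  SLTU/SLTIU leave exactly
 * that 0/1 value in TCG_REG_TMP0, which is then folded into the high part.
 */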
static const struct {
    RISCVInsn op;
    bool swap;
} tcg_brcond_to_riscv[] = {
    [TCG_COND_EQ] =  { OPC_BEQ,  false },
    [TCG_COND_NE] =  { OPC_BNE,  false },
    [TCG_COND_LT] =  { OPC_BLT,  false },
    [TCG_COND_GE] =  { OPC_BGE,  false },
    [TCG_COND_LE] =  { OPC_BGE,  true  },
    [TCG_COND_GT] =  { OPC_BLT,  true  },
    [TCG_COND_LTU] = { OPC_BLTU, false },
    [TCG_COND_GEU] = { OPC_BGEU, false },
    [TCG_COND_LEU] = { OPC_BGEU, true  },
    [TCG_COND_GTU] = { OPC_BLTU, true  }
};

static void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGReg arg1,
                           TCGReg arg2, TCGLabel *l)
{
    RISCVInsn op = tcg_brcond_to_riscv[cond].op;

    tcg_debug_assert(op != 0);

    if (tcg_brcond_to_riscv[cond].swap) {
        TCGReg t = arg1;
        arg1 = arg2;
        arg2 = t;
    }

    if (l->has_value) {
        intptr_t diff = tcg_pcrel_diff(s, l->u.value_ptr);
        if (diff == sextreg(diff, 0, 12)) {
            tcg_out_opc_branch(s, op, arg1, arg2, diff);
        } else {
            /* Invert the conditional branch. */
            tcg_out_opc_branch(s, op ^ (1 << 12), arg1, arg2, 8);
            tcg_out_opc_jump(s, OPC_JAL, TCG_REG_ZERO, diff - 4);
        }
    } else {
        tcg_out_reloc(s, s->code_ptr, R_RISCV_BRANCH, l, 0);
        tcg_out_opc_branch(s, op, arg1, arg2, 0);
        /* NOP to allow patching later */
        tcg_out_opc_imm(s, OPC_ADDI, TCG_REG_ZERO, TCG_REG_ZERO, 0);
    }
}
static void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGReg ret,
                            TCGReg arg1, TCGReg arg2)
{
    switch (cond) {
    case TCG_COND_EQ:
        tcg_out_opc_reg(s, OPC_SUB, ret, arg1, arg2);
        tcg_out_opc_imm(s, OPC_SLTIU, ret, ret, 1);
        break;
    case TCG_COND_NE:
        tcg_out_opc_reg(s, OPC_SUB, ret, arg1, arg2);
        tcg_out_opc_reg(s, OPC_SLTU, ret, TCG_REG_ZERO, ret);
        break;
    case TCG_COND_LT:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
        break;
    case TCG_COND_GE:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg1, arg2);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_LE:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_GT:
        tcg_out_opc_reg(s, OPC_SLT, ret, arg2, arg1);
        break;
    case TCG_COND_LTU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
        break;
    case TCG_COND_GEU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg1, arg2);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_LEU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
        tcg_out_opc_imm(s, OPC_XORI, ret, ret, 1);
        break;
    case TCG_COND_GTU:
        tcg_out_opc_reg(s, OPC_SLTU, ret, arg2, arg1);
        break;
    default:
        g_assert_not_reached();
        break;
    }
}

static void tcg_out_brcond2(TCGContext *s, TCGCond cond, TCGReg al, TCGReg ah,
                            TCGReg bl, TCGReg bh, TCGLabel *l)
{
    /* todo */
    g_assert_not_reached();
}

static void tcg_out_setcond2(TCGContext *s, TCGCond cond, TCGReg ret,
                             TCGReg al, TCGReg ah, TCGReg bl, TCGReg bh)
{
    /* todo */
    g_assert_not_reached();
}
static inline void tcg_out_goto(TCGContext *s, tcg_insn_unit *target)
{
    ptrdiff_t offset = tcg_pcrel_diff(s, target);
    tcg_debug_assert(offset == sextreg(offset, 1, 20) << 1);
    tcg_out_opc_jump(s, OPC_JAL, TCG_REG_ZERO, offset);
}

static void tcg_out_call_int(TCGContext *s, tcg_insn_unit *arg, bool tail)
{
    TCGReg link = tail ? TCG_REG_ZERO : TCG_REG_RA;
    ptrdiff_t offset = tcg_pcrel_diff(s, arg);
    int ret;

    if (offset == sextreg(offset, 1, 20) << 1) {
        /* short jump: -2097150 to 2097152 */
        tcg_out_opc_jump(s, OPC_JAL, link, offset);
    } else if (TCG_TARGET_REG_BITS == 32 ||
               offset == sextreg(offset, 1, 31) << 1) {
        /* long jump: -2147483646 to 2147483648 */
        tcg_out_opc_upper(s, OPC_AUIPC, TCG_REG_TMP0, 0);
        tcg_out_opc_imm(s, OPC_JALR, link, TCG_REG_TMP0, 0);
        ret = reloc_call(s->code_ptr - 2, arg);
        tcg_debug_assert(ret == true);
    } else if (TCG_TARGET_REG_BITS == 64) {
        /* far jump: 64-bit */
        tcg_target_long imm = sextreg((tcg_target_long)arg, 0, 12);
        tcg_target_long base = (tcg_target_long)arg - imm;
        tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_TMP0, base);
        tcg_out_opc_imm(s, OPC_JALR, link, TCG_REG_TMP0, imm);
    } else {
        g_assert_not_reached();
    }
}

static void tcg_out_call(TCGContext *s, tcg_insn_unit *arg)
{
    tcg_out_call_int(s, arg, false);
}
static void tcg_out_mb(TCGContext *s, TCGArg a0)
{
    tcg_insn_unit insn = OPC_FENCE;

    if (a0 & TCG_MO_LD_LD) {
        insn |= 0x02200000;
    }
    if (a0 & TCG_MO_ST_LD) {
        insn |= 0x01200000;
    }
    if (a0 & TCG_MO_LD_ST) {
        insn |= 0x02100000;
    }
    if (a0 & TCG_MO_ST_ST) {
        insn |= 0x01100000;
    }
    tcg_out32(s, insn);
}
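
/*
 * FENCE encodes its predecessor set in bits [27:24] (PI, PO, PR, PW) and
 * its successor set in bits [23:20] (SI, SO, SR, SW).  So 0x02200000 is
 * "fence r, r", 0x01200000 is "fence w, r", 0x02100000 is "fence r, w"
 * and 0x01100000 is "fence w, w", matching the four TCG_MO_* cases above.
 */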
/*
 * Load/store and TLB
 */

#if defined(CONFIG_SOFTMMU)
#include "../tcg-ldst.c.inc"

/* helper signature: helper_ret_ld_mmu(CPUState *env, target_ulong addr,
 *                                     TCGMemOpIdx oi, uintptr_t ra)
 */
static void * const qemu_ld_helpers[16] = {
    [MO_UB]   = helper_ret_ldub_mmu,
    [MO_SB]   = helper_ret_ldsb_mmu,
    [MO_LEUW] = helper_le_lduw_mmu,
    [MO_LESW] = helper_le_ldsw_mmu,
    [MO_LEUL] = helper_le_ldul_mmu,
#if TCG_TARGET_REG_BITS == 64
    [MO_LESL] = helper_le_ldsl_mmu,
#endif
    [MO_LEQ]  = helper_le_ldq_mmu,
    [MO_BEUW] = helper_be_lduw_mmu,
    [MO_BESW] = helper_be_ldsw_mmu,
    [MO_BEUL] = helper_be_ldul_mmu,
#if TCG_TARGET_REG_BITS == 64
    [MO_BESL] = helper_be_ldsl_mmu,
#endif
    [MO_BEQ]  = helper_be_ldq_mmu,
};

/* helper signature: helper_ret_st_mmu(CPUState *env, target_ulong addr,
 *                                     uintxx_t val, TCGMemOpIdx oi,
 *                                     uintptr_t ra)
 */
static void * const qemu_st_helpers[16] = {
    [MO_UB]   = helper_ret_stb_mmu,
    [MO_LEUW] = helper_le_stw_mmu,
    [MO_LEUL] = helper_le_stl_mmu,
    [MO_LEQ]  = helper_le_stq_mmu,
    [MO_BEUW] = helper_be_stw_mmu,
    [MO_BEUL] = helper_be_stl_mmu,
    [MO_BEQ]  = helper_be_stq_mmu,
};

/* We don't support oversize guests */
QEMU_BUILD_BUG_ON(TCG_TARGET_REG_BITS < TARGET_LONG_BITS);

/* We expect to use a 12-bit negative offset from ENV. */
QEMU_BUILD_BUG_ON(TLB_MASK_TABLE_OFS(0) > 0);
QEMU_BUILD_BUG_ON(TLB_MASK_TABLE_OFS(0) < -(1 << 11));
static void tcg_out_tlb_load(TCGContext *s, TCGReg addrl,
                             TCGReg addrh, TCGMemOpIdx oi,
                             tcg_insn_unit **label_ptr, bool is_load)
{
    MemOp opc = get_memop(oi);
    unsigned s_bits = opc & MO_SIZE;
    unsigned a_bits = get_alignment_bits(opc);
    tcg_target_long compare_mask;
    int mem_index = get_mmuidx(oi);
    int fast_ofs = TLB_MASK_TABLE_OFS(mem_index);
    int mask_ofs = fast_ofs + offsetof(CPUTLBDescFast, mask);
    int table_ofs = fast_ofs + offsetof(CPUTLBDescFast, table);
    TCGReg mask_base = TCG_AREG0, table_base = TCG_AREG0;

    tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_TMP0, mask_base, mask_ofs);
    tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_TMP1, table_base, table_ofs);

    tcg_out_opc_imm(s, OPC_SRLI, TCG_REG_TMP2, addrl,
                    TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS);
    tcg_out_opc_reg(s, OPC_AND, TCG_REG_TMP2, TCG_REG_TMP2, TCG_REG_TMP0);
    tcg_out_opc_reg(s, OPC_ADD, TCG_REG_TMP2, TCG_REG_TMP2, TCG_REG_TMP1);

    /* Load the tlb comparator and the addend. */
    tcg_out_ld(s, TCG_TYPE_TL, TCG_REG_TMP0, TCG_REG_TMP2,
               is_load ? offsetof(CPUTLBEntry, addr_read)
                       : offsetof(CPUTLBEntry, addr_write));
    tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_TMP2, TCG_REG_TMP2,
               offsetof(CPUTLBEntry, addend));

    /* We don't support unaligned accesses. */
    if (a_bits < s_bits) {
        a_bits = s_bits;
    }
    /* Clear the non-page, non-alignment bits from the address. */
    compare_mask = (tcg_target_long)TARGET_PAGE_MASK | ((1 << a_bits) - 1);
    if (compare_mask == sextreg(compare_mask, 0, 12)) {
        tcg_out_opc_imm(s, OPC_ANDI, TCG_REG_TMP1, addrl, compare_mask);
    } else {
        tcg_out_movi(s, TCG_TYPE_TL, TCG_REG_TMP1, compare_mask);
        tcg_out_opc_reg(s, OPC_AND, TCG_REG_TMP1, TCG_REG_TMP1, addrl);
    }

    /* Compare masked address with the TLB entry. */
    label_ptr[0] = s->code_ptr;
    tcg_out_opc_branch(s, OPC_BNE, TCG_REG_TMP0, TCG_REG_TMP1, 0);
    /* NOP to allow patching later */
    tcg_out_opc_imm(s, OPC_ADDI, TCG_REG_ZERO, TCG_REG_ZERO, 0);

    /* TLB Hit - translate address using addend. */
    if (TCG_TARGET_REG_BITS > TARGET_LONG_BITS) {
        tcg_out_ext32u(s, TCG_REG_TMP0, addrl);
        addrl = TCG_REG_TMP0;
    }
    tcg_out_opc_reg(s, OPC_ADD, TCG_REG_TMP0, TCG_REG_TMP2, addrl);
}
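
/*
 * To summarize the fast path built above: TMP0/TMP1 hold the mask and
 * table pointer from CPUTLBDescFast; the TLB index is
 * (addr >> (TARGET_PAGE_BITS - CPU_TLB_ENTRY_BITS)) & mask, which the
 * pre-scaled mask already turns into a byte offset into the table.  The
 * masked guest address is compared against the entry's comparator, and on
 * a hit TMP0 ends up holding addr + addend, the host address used by the
 * direct load/store emitters below.
 */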
static void add_qemu_ldst_label(TCGContext *s, int is_ld, TCGMemOpIdx oi,
                                TCGType ext,
                                TCGReg datalo, TCGReg datahi,
                                TCGReg addrlo, TCGReg addrhi,
                                void *raddr, tcg_insn_unit **label_ptr)
{
    TCGLabelQemuLdst *label = new_ldst_label(s);

    label->is_ld = is_ld;
    label->oi = oi;
    label->type = ext;
    label->datalo_reg = datalo;
    label->datahi_reg = datahi;
    label->addrlo_reg = addrlo;
    label->addrhi_reg = addrhi;
    label->raddr = raddr;
    label->label_ptr[0] = label_ptr[0];
}

static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    TCGMemOpIdx oi = l->oi;
    MemOp opc = get_memop(oi);
    TCGReg a0 = tcg_target_call_iarg_regs[0];
    TCGReg a1 = tcg_target_call_iarg_regs[1];
    TCGReg a2 = tcg_target_call_iarg_regs[2];
    TCGReg a3 = tcg_target_call_iarg_regs[3];

    /* We don't support oversize guests */
    if (TCG_TARGET_REG_BITS < TARGET_LONG_BITS) {
        g_assert_not_reached();
    }

    /* resolve label address */
    if (!patch_reloc(l->label_ptr[0], R_RISCV_BRANCH,
                     (intptr_t) s->code_ptr, 0)) {
        return false;
    }

    /* call load helper */
    tcg_out_mov(s, TCG_TYPE_PTR, a0, TCG_AREG0);
    tcg_out_mov(s, TCG_TYPE_PTR, a1, l->addrlo_reg);
    tcg_out_movi(s, TCG_TYPE_PTR, a2, oi);
    tcg_out_movi(s, TCG_TYPE_PTR, a3, (tcg_target_long)l->raddr);

    tcg_out_call(s, qemu_ld_helpers[opc & (MO_BSWAP | MO_SSIZE)]);
    tcg_out_mov(s, (opc & MO_SIZE) == MO_64, l->datalo_reg, a0);

    tcg_out_goto(s, l->raddr);
    return true;
}

static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
{
    TCGMemOpIdx oi = l->oi;
    MemOp opc = get_memop(oi);
    MemOp s_bits = opc & MO_SIZE;
    TCGReg a0 = tcg_target_call_iarg_regs[0];
    TCGReg a1 = tcg_target_call_iarg_regs[1];
    TCGReg a2 = tcg_target_call_iarg_regs[2];
    TCGReg a3 = tcg_target_call_iarg_regs[3];
    TCGReg a4 = tcg_target_call_iarg_regs[4];

    /* We don't support oversize guests */
    if (TCG_TARGET_REG_BITS < TARGET_LONG_BITS) {
        g_assert_not_reached();
    }

    /* resolve label address */
    if (!patch_reloc(l->label_ptr[0], R_RISCV_BRANCH,
                     (intptr_t) s->code_ptr, 0)) {
        return false;
    }

    /* call store helper */
    tcg_out_mov(s, TCG_TYPE_PTR, a0, TCG_AREG0);
    tcg_out_mov(s, TCG_TYPE_PTR, a1, l->addrlo_reg);
    tcg_out_mov(s, TCG_TYPE_PTR, a2, l->datalo_reg);
    switch (s_bits) {
    case MO_8:
        tcg_out_ext8u(s, a2, a2);
        break;
    case MO_16:
        tcg_out_ext16u(s, a2, a2);
        break;
    default:
        break;
    }
    tcg_out_movi(s, TCG_TYPE_PTR, a3, oi);
    tcg_out_movi(s, TCG_TYPE_PTR, a4, (tcg_target_long)l->raddr);

    tcg_out_call(s, qemu_st_helpers[opc & (MO_BSWAP | MO_SSIZE)]);

    tcg_out_goto(s, l->raddr);
    return true;
}
#endif /* CONFIG_SOFTMMU */
static void tcg_out_qemu_ld_direct(TCGContext *s, TCGReg lo, TCGReg hi,
                                   TCGReg base, MemOp opc, bool is_64)
{
    const MemOp bswap = opc & MO_BSWAP;

    /* We don't yet handle byteswapping, assert */
    g_assert(!bswap);

    switch (opc & (MO_SSIZE)) {
    case MO_UB:
        tcg_out_opc_imm(s, OPC_LBU, lo, base, 0);
        break;
    case MO_SB:
        tcg_out_opc_imm(s, OPC_LB, lo, base, 0);
        break;
    case MO_UW:
        tcg_out_opc_imm(s, OPC_LHU, lo, base, 0);
        break;
    case MO_SW:
        tcg_out_opc_imm(s, OPC_LH, lo, base, 0);
        break;
    case MO_UL:
        if (TCG_TARGET_REG_BITS == 64 && is_64) {
            tcg_out_opc_imm(s, OPC_LWU, lo, base, 0);
            break;
        }
        /* FALLTHRU */
    case MO_SL:
        tcg_out_opc_imm(s, OPC_LW, lo, base, 0);
        break;
    case MO_Q:
        /* Prefer to load from offset 0 first, but allow for overlap. */
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_out_opc_imm(s, OPC_LD, lo, base, 0);
        } else if (lo != base) {
            tcg_out_opc_imm(s, OPC_LW, lo, base, 0);
            tcg_out_opc_imm(s, OPC_LW, hi, base, 4);
        } else {
            tcg_out_opc_imm(s, OPC_LW, hi, base, 4);
            tcg_out_opc_imm(s, OPC_LW, lo, base, 0);
        }
        break;
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, bool is_64)
{
    TCGReg addr_regl, addr_regh __attribute__((unused));
    TCGReg data_regl, data_regh;
    TCGMemOpIdx oi;
    MemOp opc;
#if defined(CONFIG_SOFTMMU)
    tcg_insn_unit *label_ptr[1];
#endif
    TCGReg base = TCG_REG_TMP0;

    data_regl = *args++;
    data_regh = (TCG_TARGET_REG_BITS == 32 && is_64 ? *args++ : 0);
    addr_regl = *args++;
    addr_regh = (TCG_TARGET_REG_BITS < TARGET_LONG_BITS ? *args++ : 0);
    oi = *args++;
    opc = get_memop(oi);

#if defined(CONFIG_SOFTMMU)
    tcg_out_tlb_load(s, addr_regl, addr_regh, oi, label_ptr, 1);
    tcg_out_qemu_ld_direct(s, data_regl, data_regh, base, opc, is_64);
    add_qemu_ldst_label(s, 1, oi,
                        (is_64 ? TCG_TYPE_I64 : TCG_TYPE_I32),
                        data_regl, data_regh, addr_regl, addr_regh,
                        s->code_ptr, label_ptr);
#else
    if (TCG_TARGET_REG_BITS > TARGET_LONG_BITS) {
        tcg_out_ext32u(s, base, addr_regl);
        addr_regl = base;
    }

    if (guest_base == 0) {
        tcg_out_opc_reg(s, OPC_ADD, base, addr_regl, TCG_REG_ZERO);
    } else {
        tcg_out_opc_reg(s, OPC_ADD, base, TCG_GUEST_BASE_REG, addr_regl);
    }
    tcg_out_qemu_ld_direct(s, data_regl, data_regh, base, opc, is_64);
#endif
}

static void tcg_out_qemu_st_direct(TCGContext *s, TCGReg lo, TCGReg hi,
                                   TCGReg base, MemOp opc)
{
    const MemOp bswap = opc & MO_BSWAP;

    /* We don't yet handle byteswapping, assert */
    g_assert(!bswap);

    switch (opc & (MO_SSIZE)) {
    case MO_8:
        tcg_out_opc_store(s, OPC_SB, base, lo, 0);
        break;
    case MO_16:
        tcg_out_opc_store(s, OPC_SH, base, lo, 0);
        break;
    case MO_32:
        tcg_out_opc_store(s, OPC_SW, base, lo, 0);
        break;
    case MO_64:
        if (TCG_TARGET_REG_BITS == 64) {
            tcg_out_opc_store(s, OPC_SD, base, lo, 0);
        } else {
            tcg_out_opc_store(s, OPC_SW, base, lo, 0);
            tcg_out_opc_store(s, OPC_SW, base, hi, 4);
        }
        break;
    default:
        g_assert_not_reached();
    }
}

static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, bool is_64)
{
    TCGReg addr_regl, addr_regh __attribute__((unused));
    TCGReg data_regl, data_regh;
    TCGMemOpIdx oi;
    MemOp opc;
#if defined(CONFIG_SOFTMMU)
    tcg_insn_unit *label_ptr[1];
#endif
    TCGReg base = TCG_REG_TMP0;

    data_regl = *args++;
    data_regh = (TCG_TARGET_REG_BITS == 32 && is_64 ? *args++ : 0);
    addr_regl = *args++;
    addr_regh = (TCG_TARGET_REG_BITS < TARGET_LONG_BITS ? *args++ : 0);
    oi = *args++;
    opc = get_memop(oi);

#if defined(CONFIG_SOFTMMU)
    tcg_out_tlb_load(s, addr_regl, addr_regh, oi, label_ptr, 0);
    tcg_out_qemu_st_direct(s, data_regl, data_regh, base, opc);
    add_qemu_ldst_label(s, 0, oi,
                        (is_64 ? TCG_TYPE_I64 : TCG_TYPE_I32),
                        data_regl, data_regh, addr_regl, addr_regh,
                        s->code_ptr, label_ptr);
#else
    if (TCG_TARGET_REG_BITS > TARGET_LONG_BITS) {
        tcg_out_ext32u(s, base, addr_regl);
        addr_regl = base;
    }

    if (guest_base == 0) {
        tcg_out_opc_reg(s, OPC_ADD, base, addr_regl, TCG_REG_ZERO);
    } else {
        tcg_out_opc_reg(s, OPC_ADD, base, TCG_GUEST_BASE_REG, addr_regl);
    }
    tcg_out_qemu_st_direct(s, data_regl, data_regh, base, opc);
#endif
}
static tcg_insn_unit *tb_ret_addr;

static void tcg_out_op(TCGContext *s, TCGOpcode opc,
                       const TCGArg *args, const int *const_args)
{
    TCGArg a0 = args[0];
    TCGArg a1 = args[1];
    TCGArg a2 = args[2];
    int c2 = const_args[2];

    switch (opc) {
    case INDEX_op_exit_tb:
        /* Reuse the zeroing that exists for goto_ptr. */
        if (a0 == 0) {
            tcg_out_call_int(s, s->code_gen_epilogue, true);
        } else {
            tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_A0, a0);
            tcg_out_call_int(s, tb_ret_addr, true);
        }
        break;

    case INDEX_op_goto_tb:
        assert(s->tb_jmp_insn_offset == 0);
        /* indirect jump method */
        tcg_out_ld(s, TCG_TYPE_PTR, TCG_REG_TMP0, TCG_REG_ZERO,
                   (uintptr_t)(s->tb_jmp_target_addr + a0));
        tcg_out_opc_imm(s, OPC_JALR, TCG_REG_ZERO, TCG_REG_TMP0, 0);
        set_jmp_reset_offset(s, a0);
        break;

    case INDEX_op_goto_ptr:
        tcg_out_opc_imm(s, OPC_JALR, TCG_REG_ZERO, a0, 0);
        break;

    case INDEX_op_br:
        tcg_out_reloc(s, s->code_ptr, R_RISCV_JAL, arg_label(a0), 0);
        tcg_out_opc_jump(s, OPC_JAL, TCG_REG_ZERO, 0);
        break;

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8u_i64:
        tcg_out_ldst(s, OPC_LBU, a0, a1, a2);
        break;
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld8s_i64:
        tcg_out_ldst(s, OPC_LB, a0, a1, a2);
        break;
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16u_i64:
        tcg_out_ldst(s, OPC_LHU, a0, a1, a2);
        break;
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld16s_i64:
        tcg_out_ldst(s, OPC_LH, a0, a1, a2);
        break;
    case INDEX_op_ld32u_i64:
        tcg_out_ldst(s, OPC_LWU, a0, a1, a2);
        break;
    case INDEX_op_ld_i32:
    case INDEX_op_ld32s_i64:
        tcg_out_ldst(s, OPC_LW, a0, a1, a2);
        break;
    case INDEX_op_ld_i64:
        tcg_out_ldst(s, OPC_LD, a0, a1, a2);
        break;

    case INDEX_op_st8_i32:
    case INDEX_op_st8_i64:
        tcg_out_ldst(s, OPC_SB, a0, a1, a2);
        break;
    case INDEX_op_st16_i32:
    case INDEX_op_st16_i64:
        tcg_out_ldst(s, OPC_SH, a0, a1, a2);
        break;
    case INDEX_op_st_i32:
    case INDEX_op_st32_i64:
        tcg_out_ldst(s, OPC_SW, a0, a1, a2);
        break;
    case INDEX_op_st_i64:
        tcg_out_ldst(s, OPC_SD, a0, a1, a2);
        break;

    case INDEX_op_add_i32:
        if (c2) {
            tcg_out_opc_imm(s, OPC_ADDIW, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_ADDW, a0, a1, a2);
        }
        break;
    case INDEX_op_add_i64:
        if (c2) {
            tcg_out_opc_imm(s, OPC_ADDI, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_ADD, a0, a1, a2);
        }
        break;

    case INDEX_op_sub_i32:
        if (c2) {
            tcg_out_opc_imm(s, OPC_ADDIW, a0, a1, -a2);
        } else {
            tcg_out_opc_reg(s, OPC_SUBW, a0, a1, a2);
        }
        break;
    case INDEX_op_sub_i64:
        if (c2) {
            tcg_out_opc_imm(s, OPC_ADDI, a0, a1, -a2);
        } else {
            tcg_out_opc_reg(s, OPC_SUB, a0, a1, a2);
        }
        break;

    case INDEX_op_and_i32:
    case INDEX_op_and_i64:
        if (c2) {
            tcg_out_opc_imm(s, OPC_ANDI, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_AND, a0, a1, a2);
        }
        break;

    case INDEX_op_or_i32:
    case INDEX_op_or_i64:
        if (c2) {
            tcg_out_opc_imm(s, OPC_ORI, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_OR, a0, a1, a2);
        }
        break;

    case INDEX_op_xor_i32:
    case INDEX_op_xor_i64:
        if (c2) {
            tcg_out_opc_imm(s, OPC_XORI, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_XOR, a0, a1, a2);
        }
        break;

    case INDEX_op_not_i32:
    case INDEX_op_not_i64:
        tcg_out_opc_imm(s, OPC_XORI, a0, a1, -1);
        break;

    case INDEX_op_neg_i32:
        tcg_out_opc_reg(s, OPC_SUBW, a0, TCG_REG_ZERO, a1);
        break;
    case INDEX_op_neg_i64:
        tcg_out_opc_reg(s, OPC_SUB, a0, TCG_REG_ZERO, a1);
        break;

    case INDEX_op_mul_i32:
        tcg_out_opc_reg(s, OPC_MULW, a0, a1, a2);
        break;
    case INDEX_op_mul_i64:
        tcg_out_opc_reg(s, OPC_MUL, a0, a1, a2);
        break;

    case INDEX_op_div_i32:
        tcg_out_opc_reg(s, OPC_DIVW, a0, a1, a2);
        break;
    case INDEX_op_div_i64:
        tcg_out_opc_reg(s, OPC_DIV, a0, a1, a2);
        break;

    case INDEX_op_divu_i32:
        tcg_out_opc_reg(s, OPC_DIVUW, a0, a1, a2);
        break;
    case INDEX_op_divu_i64:
        tcg_out_opc_reg(s, OPC_DIVU, a0, a1, a2);
        break;

    case INDEX_op_rem_i32:
        tcg_out_opc_reg(s, OPC_REMW, a0, a1, a2);
        break;
    case INDEX_op_rem_i64:
        tcg_out_opc_reg(s, OPC_REM, a0, a1, a2);
        break;

    case INDEX_op_remu_i32:
        tcg_out_opc_reg(s, OPC_REMUW, a0, a1, a2);
        break;
    case INDEX_op_remu_i64:
        tcg_out_opc_reg(s, OPC_REMU, a0, a1, a2);
        break;

    case INDEX_op_shl_i32:
        if (c2) {
            tcg_out_opc_imm(s, OPC_SLLIW, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_SLLW, a0, a1, a2);
        }
        break;
    case INDEX_op_shl_i64:
        if (c2) {
            tcg_out_opc_imm(s, OPC_SLLI, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_SLL, a0, a1, a2);
        }
        break;

    case INDEX_op_shr_i32:
        if (c2) {
            tcg_out_opc_imm(s, OPC_SRLIW, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_SRLW, a0, a1, a2);
        }
        break;
    case INDEX_op_shr_i64:
        if (c2) {
            tcg_out_opc_imm(s, OPC_SRLI, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_SRL, a0, a1, a2);
        }
        break;

    case INDEX_op_sar_i32:
        if (c2) {
            tcg_out_opc_imm(s, OPC_SRAIW, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_SRAW, a0, a1, a2);
        }
        break;
    case INDEX_op_sar_i64:
        if (c2) {
            tcg_out_opc_imm(s, OPC_SRAI, a0, a1, a2);
        } else {
            tcg_out_opc_reg(s, OPC_SRA, a0, a1, a2);
        }
        break;

    case INDEX_op_add2_i32:
        tcg_out_addsub2(s, a0, a1, a2, args[3], args[4], args[5],
                        const_args[4], const_args[5], false, true);
        break;
    case INDEX_op_add2_i64:
        tcg_out_addsub2(s, a0, a1, a2, args[3], args[4], args[5],
                        const_args[4], const_args[5], false, false);
        break;
    case INDEX_op_sub2_i32:
        tcg_out_addsub2(s, a0, a1, a2, args[3], args[4], args[5],
                        const_args[4], const_args[5], true, true);
        break;
    case INDEX_op_sub2_i64:
        tcg_out_addsub2(s, a0, a1, a2, args[3], args[4], args[5],
                        const_args[4], const_args[5], true, false);
        break;

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        tcg_out_brcond(s, a2, a0, a1, arg_label(args[3]));
        break;
    case INDEX_op_brcond2_i32:
        tcg_out_brcond2(s, args[4], a0, a1, a2, args[3], arg_label(args[5]));
        break;

    case INDEX_op_setcond_i32:
    case INDEX_op_setcond_i64:
        tcg_out_setcond(s, args[3], a0, a1, a2);
        break;
    case INDEX_op_setcond2_i32:
        tcg_out_setcond2(s, args[5], a0, a1, a2, args[3], args[4]);
        break;

    case INDEX_op_qemu_ld_i32:
        tcg_out_qemu_ld(s, args, false);
        break;
    case INDEX_op_qemu_ld_i64:
        tcg_out_qemu_ld(s, args, true);
        break;
    case INDEX_op_qemu_st_i32:
        tcg_out_qemu_st(s, args, false);
        break;
    case INDEX_op_qemu_st_i64:
        tcg_out_qemu_st(s, args, true);
        break;

    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
        tcg_out_ext8u(s, a0, a1);
        break;
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
        tcg_out_ext16u(s, a0, a1);
        break;
    case INDEX_op_ext32u_i64:
    case INDEX_op_extu_i32_i64:
        tcg_out_ext32u(s, a0, a1);
        break;

    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
        tcg_out_ext8s(s, a0, a1);
        break;
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
        tcg_out_ext16s(s, a0, a1);
        break;
    case INDEX_op_ext32s_i64:
    case INDEX_op_extrl_i64_i32:
    case INDEX_op_ext_i32_i64:
        tcg_out_ext32s(s, a0, a1);
        break;

    case INDEX_op_extrh_i64_i32:
        tcg_out_opc_imm(s, OPC_SRAI, a0, a1, 32);
        break;

    case INDEX_op_mulsh_i32:
    case INDEX_op_mulsh_i64:
        tcg_out_opc_reg(s, OPC_MULH, a0, a1, a2);
        break;

    case INDEX_op_muluh_i32:
    case INDEX_op_muluh_i64:
        tcg_out_opc_reg(s, OPC_MULHU, a0, a1, a2);
        break;

    case INDEX_op_mb:
        tcg_out_mb(s, a0);
        break;

    case INDEX_op_mov_i32:  /* Always emitted via tcg_out_mov. */
    case INDEX_op_mov_i64:
    case INDEX_op_movi_i32: /* Always emitted via tcg_out_movi. */
    case INDEX_op_movi_i64:
    case INDEX_op_call:     /* Always emitted via tcg_out_call. */
    default:
        g_assert_not_reached();
    }
}
static const TCGTargetOpDef *tcg_target_op_def(TCGOpcode op)
{
    static const TCGTargetOpDef r
        = { .args_ct_str = { "r" } };
    static const TCGTargetOpDef r_r
        = { .args_ct_str = { "r", "r" } };
    static const TCGTargetOpDef rZ_r
        = { .args_ct_str = { "rZ", "r" } };
    static const TCGTargetOpDef rZ_rZ
        = { .args_ct_str = { "rZ", "rZ" } };
    static const TCGTargetOpDef rZ_rZ_rZ_rZ
        = { .args_ct_str = { "rZ", "rZ", "rZ", "rZ" } };
    static const TCGTargetOpDef r_r_ri
        = { .args_ct_str = { "r", "r", "ri" } };
    static const TCGTargetOpDef r_r_rI
        = { .args_ct_str = { "r", "r", "rI" } };
    static const TCGTargetOpDef r_rZ_rN
        = { .args_ct_str = { "r", "rZ", "rN" } };
    static const TCGTargetOpDef r_rZ_rZ
        = { .args_ct_str = { "r", "rZ", "rZ" } };
    static const TCGTargetOpDef r_rZ_rZ_rZ_rZ
        = { .args_ct_str = { "r", "rZ", "rZ", "rZ", "rZ" } };
    static const TCGTargetOpDef r_L
        = { .args_ct_str = { "r", "L" } };
    static const TCGTargetOpDef r_r_L
        = { .args_ct_str = { "r", "r", "L" } };
    static const TCGTargetOpDef r_L_L
        = { .args_ct_str = { "r", "L", "L" } };
    static const TCGTargetOpDef r_r_L_L
        = { .args_ct_str = { "r", "r", "L", "L" } };
    static const TCGTargetOpDef LZ_L
        = { .args_ct_str = { "LZ", "L" } };
    static const TCGTargetOpDef LZ_L_L
        = { .args_ct_str = { "LZ", "L", "L" } };
    static const TCGTargetOpDef LZ_LZ_L
        = { .args_ct_str = { "LZ", "LZ", "L" } };
    static const TCGTargetOpDef LZ_LZ_L_L
        = { .args_ct_str = { "LZ", "LZ", "L", "L" } };
    static const TCGTargetOpDef r_r_rZ_rZ_rM_rM
        = { .args_ct_str = { "r", "r", "rZ", "rZ", "rM", "rM" } };

    switch (op) {
    case INDEX_op_goto_ptr:
        return &r;

    case INDEX_op_ld8u_i32:
    case INDEX_op_ld8s_i32:
    case INDEX_op_ld16u_i32:
    case INDEX_op_ld16s_i32:
    case INDEX_op_ld_i32:
    case INDEX_op_not_i32:
    case INDEX_op_neg_i32:
    case INDEX_op_ld8u_i64:
    case INDEX_op_ld8s_i64:
    case INDEX_op_ld16u_i64:
    case INDEX_op_ld16s_i64:
    case INDEX_op_ld32s_i64:
    case INDEX_op_ld32u_i64:
    case INDEX_op_ld_i64:
    case INDEX_op_not_i64:
    case INDEX_op_neg_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32u_i64:
    case INDEX_op_extu_i32_i64:
    case INDEX_op_ext8s_i32:
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext32s_i64:
    case INDEX_op_extrl_i64_i32:
    case INDEX_op_extrh_i64_i32:
    case INDEX_op_ext_i32_i64:
        return &r_r;

    case INDEX_op_st8_i32:
    case INDEX_op_st16_i32:
    case INDEX_op_st_i32:
    case INDEX_op_st8_i64:
    case INDEX_op_st16_i64:
    case INDEX_op_st32_i64:
    case INDEX_op_st_i64:
        return &rZ_r;

    case INDEX_op_add_i32:
    case INDEX_op_and_i32:
    case INDEX_op_or_i32:
    case INDEX_op_xor_i32:
    case INDEX_op_add_i64:
    case INDEX_op_and_i64:
    case INDEX_op_or_i64:
    case INDEX_op_xor_i64:
        return &r_r_rI;

    case INDEX_op_sub_i32:
    case INDEX_op_sub_i64:
        return &r_rZ_rN;

    case INDEX_op_mul_i32:
    case INDEX_op_mulsh_i32:
    case INDEX_op_muluh_i32:
    case INDEX_op_div_i32:
    case INDEX_op_divu_i32:
    case INDEX_op_rem_i32:
    case INDEX_op_remu_i32:
    case INDEX_op_setcond_i32:
    case INDEX_op_mul_i64:
    case INDEX_op_mulsh_i64:
    case INDEX_op_muluh_i64:
    case INDEX_op_div_i64:
    case INDEX_op_divu_i64:
    case INDEX_op_rem_i64:
    case INDEX_op_remu_i64:
    case INDEX_op_setcond_i64:
        return &r_rZ_rZ;

    case INDEX_op_shl_i32:
    case INDEX_op_shr_i32:
    case INDEX_op_sar_i32:
    case INDEX_op_shl_i64:
    case INDEX_op_shr_i64:
    case INDEX_op_sar_i64:
        return &r_r_ri;

    case INDEX_op_brcond_i32:
    case INDEX_op_brcond_i64:
        return &rZ_rZ;

    case INDEX_op_add2_i32:
    case INDEX_op_add2_i64:
    case INDEX_op_sub2_i32:
    case INDEX_op_sub2_i64:
        return &r_r_rZ_rZ_rM_rM;

    case INDEX_op_brcond2_i32:
        return &rZ_rZ_rZ_rZ;

    case INDEX_op_setcond2_i32:
        return &r_rZ_rZ_rZ_rZ;

    case INDEX_op_qemu_ld_i32:
        return TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? &r_L : &r_L_L;
    case INDEX_op_qemu_st_i32:
        return TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? &LZ_L : &LZ_L_L;
    case INDEX_op_qemu_ld_i64:
        return TCG_TARGET_REG_BITS == 64 ? &r_L
               : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? &r_r_L
               : &r_r_L_L;
    case INDEX_op_qemu_st_i64:
        return TCG_TARGET_REG_BITS == 64 ? &LZ_L
               : TARGET_LONG_BITS <= TCG_TARGET_REG_BITS ? &LZ_LZ_L
               : &LZ_LZ_L_L;

    default:
        return NULL;
    }
}

static const int tcg_target_callee_save_regs[] = {
    TCG_REG_S0,       /* used for the global env (TCG_AREG0) */
    TCG_REG_S1,
    TCG_REG_S2,
    TCG_REG_S3,
    TCG_REG_S4,
    TCG_REG_S5,
    TCG_REG_S6,
    TCG_REG_S7,
    TCG_REG_S8,
    TCG_REG_S9,
    TCG_REG_S10,
    TCG_REG_S11,
    TCG_REG_RA,       /* should be last for ABI compliance */
};

/* Stack frame parameters.  */
#define REG_SIZE   (TCG_TARGET_REG_BITS / 8)
#define SAVE_SIZE  ((int)ARRAY_SIZE(tcg_target_callee_save_regs) * REG_SIZE)
#define TEMP_SIZE  (CPU_TEMP_BUF_NLONGS * (int)sizeof(long))
#define FRAME_SIZE ((TCG_STATIC_CALL_ARGS_SIZE + TEMP_SIZE + SAVE_SIZE \
                     + TCG_TARGET_STACK_ALIGN - 1) \
                    & -TCG_TARGET_STACK_ALIGN)
#define SAVE_OFS   (TCG_STATIC_CALL_ARGS_SIZE + TEMP_SIZE)

/* We're expecting to be able to use an immediate for frame allocation.  */
QEMU_BUILD_BUG_ON(FRAME_SIZE > 0x7ff);
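
/*
 * Worked example (the constants are illustrative assumptions, not taken
 * from this file): with 8-byte registers, SAVE_SIZE is 13 * 8 = 104
 * bytes.  Assuming TCG_STATIC_CALL_ARGS_SIZE = 128, CPU_TEMP_BUF_NLONGS
 * = 128 and a 16-byte TCG_TARGET_STACK_ALIGN, FRAME_SIZE rounds
 * 128 + 1024 + 104 = 1256 up to 1264, comfortably below the 0x7ff (2047)
 * ceiling of a signed 12-bit ADDI immediate that the build assertion
 * above guards against.
 */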

/* Generate global QEMU prologue and epilogue code */
static void tcg_target_qemu_prologue(TCGContext *s)
{
    int i;

    tcg_set_frame(s, TCG_REG_SP, TCG_STATIC_CALL_ARGS_SIZE, TEMP_SIZE);

    /* TB prologue */
    tcg_out_opc_imm(s, OPC_ADDI, TCG_REG_SP, TCG_REG_SP, -FRAME_SIZE);
    for (i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
        tcg_out_st(s, TCG_TYPE_REG, tcg_target_callee_save_regs[i],
                   TCG_REG_SP, SAVE_OFS + i * REG_SIZE);
    }

#if !defined(CONFIG_SOFTMMU)
    tcg_out_movi(s, TCG_TYPE_PTR, TCG_GUEST_BASE_REG, guest_base);
    tcg_regset_set_reg(s->reserved_regs, TCG_GUEST_BASE_REG);
#endif

    /* Call generated code */
    tcg_out_mov(s, TCG_TYPE_PTR, TCG_AREG0, tcg_target_call_iarg_regs[0]);
    tcg_out_opc_imm(s, OPC_JALR, TCG_REG_ZERO, tcg_target_call_iarg_regs[1], 0);

    /* Return path for goto_ptr.  Set return value to 0 */
    s->code_gen_epilogue = s->code_ptr;
    tcg_out_mov(s, TCG_TYPE_REG, TCG_REG_A0, TCG_REG_ZERO);

    /* TB epilogue */
    tb_ret_addr = s->code_ptr;
    for (i = 0; i < ARRAY_SIZE(tcg_target_callee_save_regs); i++) {
        tcg_out_ld(s, TCG_TYPE_REG, tcg_target_callee_save_regs[i],
                   TCG_REG_SP, SAVE_OFS + i * REG_SIZE);
    }

    tcg_out_opc_imm(s, OPC_ADDI, TCG_REG_SP, TCG_REG_SP, FRAME_SIZE);
    tcg_out_opc_imm(s, OPC_JALR, TCG_REG_ZERO, TCG_REG_RA, 0);
}
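
/*
 * For orientation, the prologue above emits roughly the following RV64
 * sequence (a sketch; SAVE_OFS and FRAME_SIZE depend on configuration,
 * and the guest_base load is omitted under CONFIG_SOFTMMU):
 *
 *     addi  sp, sp, -FRAME_SIZE
 *     sd    s0, SAVE_OFS+0(sp)
 *     ...
 *     sd    ra, SAVE_OFS+96(sp)
 *     mv    s0, a0                  # a0 carries env (TCG_AREG0 is s0)
 *     jalr  zero, a1, 0             # jump into the translated block
 *
 * The epilogue restores the same registers, pops the frame, and returns
 * through ra.
 */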

static void tcg_target_init(TCGContext *s)
{
    tcg_target_available_regs[TCG_TYPE_I32] = 0xffffffff;
    if (TCG_TARGET_REG_BITS == 64) {
        tcg_target_available_regs[TCG_TYPE_I64] = 0xffffffff;
    }

    /* Everything is clobbered by a call except the callee-saved s0-s11. */
    tcg_target_call_clobber_regs = -1u;
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S0);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S1);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S2);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S3);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S4);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S5);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S6);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S7);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S8);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S9);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S10);
    tcg_regset_reset_reg(tcg_target_call_clobber_regs, TCG_REG_S11);

    /* Registers the register allocator must never hand out. */
    s->reserved_regs = 0;
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_ZERO);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP0);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP1);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TMP2);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_SP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_GP);
    tcg_regset_set_reg(s->reserved_regs, TCG_REG_TP);
}

typedef struct {
    DebugFrameHeader h;
    uint8_t fde_def_cfa[4];
    uint8_t fde_reg_ofs[ARRAY_SIZE(tcg_target_callee_save_regs) * 2];
} DebugFrame;

#define ELF_HOST_MACHINE EM_RISCV

static const DebugFrame debug_frame = {
    .h.cie.len = sizeof(DebugFrameCIE) - 4, /* length after .len member */
    .h.cie.id = -1,
    .h.cie.version = 1,
    .h.cie.code_align = 1,
    .h.cie.data_align = -(TCG_TARGET_REG_BITS / 8) & 0x7f, /* sleb128 */
    .h.cie.return_column = TCG_REG_RA,

    /* Total FDE size does not include the "len" member. */
    .h.fde.len = sizeof(DebugFrame) - offsetof(DebugFrame, h.fde.cie_offset),

    .fde_def_cfa = {
        12, TCG_REG_SP,                 /* DW_CFA_def_cfa sp, ... */
        (FRAME_SIZE & 0x7f) | 0x80,     /* ... uleb128 FRAME_SIZE */
        (FRAME_SIZE >> 7)
    },
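    /*
     * FRAME_SIZE is emitted as a two-byte uleb128: the low seven bits
     * with the continuation bit set, then the remaining bits.  E.g. an
     * illustrative frame of 1264 bytes (0x4f0) encodes as 0xf0, 0x09.
     */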
    .fde_reg_ofs = {
        0x80 + 9,  12,                  /* DW_CFA_offset, s1,  -96 */
        0x80 + 18, 11,                  /* DW_CFA_offset, s2,  -88 */
        0x80 + 19, 10,                  /* DW_CFA_offset, s3,  -80 */
        0x80 + 20, 9,                   /* DW_CFA_offset, s4,  -72 */
        0x80 + 21, 8,                   /* DW_CFA_offset, s5,  -64 */
        0x80 + 22, 7,                   /* DW_CFA_offset, s6,  -56 */
        0x80 + 23, 6,                   /* DW_CFA_offset, s7,  -48 */
        0x80 + 24, 5,                   /* DW_CFA_offset, s8,  -40 */
        0x80 + 25, 4,                   /* DW_CFA_offset, s9,  -32 */
        0x80 + 26, 3,                   /* DW_CFA_offset, s10, -24 */
        0x80 + 27, 2,                   /* DW_CFA_offset, s11, -16 */
        0x80 + 1,  1,                   /* DW_CFA_offset, ra,  -8 */
    }
};
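
/*
 * Each pair in fde_reg_ofs above is a DW_CFA_offset instruction: 0x80
 * ORed with the DWARF register number, followed by the save slot as a
 * uleb128 factored offset.  With a data alignment of -8 (64-bit host),
 * the pair { 0x80 + 9, 12 } reads "register 9 (s1) is saved at
 * CFA - 96", matching the comment.  Any trailing zero bytes left in the
 * array act as DW_CFA_nop padding.
 */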

void tcg_register_jit(void *buf, size_t buf_size)
{
    tcg_register_jit_int(buf, buf_size, &debug_frame, sizeof(debug_frame));
}