translate.c

/*
 * Alpha emulation cpu translation for qemu.
 *
 * Copyright (c) 2007 Jocelyn Mayer
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

#include "cpu.h"
#include "disas/disas.h"
#include "qemu/host-utils.h"
#include "tcg-op.h"
#include "exec/cpu_ldst.h"
#include "exec/helper-proto.h"
#include "exec/helper-gen.h"
#include "trace-tcg.h"

#undef ALPHA_DEBUG_DISAS
#define CONFIG_SOFTFLOAT_INLINE

#ifdef ALPHA_DEBUG_DISAS
# define LOG_DISAS(...) qemu_log_mask(CPU_LOG_TB_IN_ASM, ## __VA_ARGS__)
#else
# define LOG_DISAS(...) do { } while (0)
#endif

typedef struct DisasContext DisasContext;
struct DisasContext {
    struct TranslationBlock *tb;
    uint64_t pc;
#ifndef CONFIG_USER_ONLY
    uint64_t palbr;
#endif
    int mem_idx;

    /* Current rounding mode for this TB.  */
    int tb_rm;
    /* Current flush-to-zero setting for this TB.  */
    int tb_ftz;
    /* implver value for this CPU.  */
    int implver;

    /* The set of registers active in the current context.  */
    TCGv *ir;

    /* Temporaries for $31 and $f31 as source and destination.  */
    TCGv zero;
    TCGv sink;
    /* Temporary for immediate constants.  */
    TCGv lit;

    bool singlestep_enabled;
};

/* Return values from translate_one, indicating the state of the TB.
   Note that zero indicates that we are not exiting the TB.  */
typedef enum {
    NO_EXIT,

    /* We have emitted one or more goto_tb.  No fixup required.  */
    EXIT_GOTO_TB,

    /* We are not using a goto_tb (for whatever reason), but have updated
       the PC (for whatever reason), so there's no need to do it again on
       exiting the TB.  */
    EXIT_PC_UPDATED,

    /* We are exiting the TB, but have neither emitted a goto_tb, nor
       updated the PC for the next instruction to be executed.  */
    EXIT_PC_STALE,

    /* We are ending the TB with a noreturn function call, e.g. longjmp.
       No following code will be executed.  */
    EXIT_NORETURN,
} ExitStatus;

/* global register indexes */
static TCGv_ptr cpu_env;
static TCGv cpu_std_ir[31];
static TCGv cpu_fir[31];
static TCGv cpu_pc;
static TCGv cpu_lock_addr;
static TCGv cpu_lock_st_addr;
static TCGv cpu_lock_value;

#ifndef CONFIG_USER_ONLY
static TCGv cpu_pal_ir[31];
#endif

#include "exec/gen-icount.h"

void alpha_translate_init(void)
{
#define DEF_VAR(V)  { &cpu_##V, #V, offsetof(CPUAlphaState, V) }

    typedef struct { TCGv *var; const char *name; int ofs; } GlobalVar;
    static const GlobalVar vars[] = {
        DEF_VAR(pc),
        DEF_VAR(lock_addr),
        DEF_VAR(lock_st_addr),
        DEF_VAR(lock_value),
    };

#undef DEF_VAR

    /* Use the symbolic register names that match the disassembler.  */
    static const char greg_names[31][4] = {
        "v0", "t0", "t1", "t2", "t3", "t4", "t5", "t6",
        "t7", "s0", "s1", "s2", "s3", "s4", "s5", "fp",
        "a0", "a1", "a2", "a3", "a4", "a5", "t8", "t9",
        "t10", "t11", "ra", "t12", "at", "gp", "sp"
    };
    static const char freg_names[31][4] = {
        "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7",
        "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15",
        "f16", "f17", "f18", "f19", "f20", "f21", "f22", "f23",
        "f24", "f25", "f26", "f27", "f28", "f29", "f30"
    };
#ifndef CONFIG_USER_ONLY
    static const char shadow_names[8][8] = {
        "pal_t7", "pal_s0", "pal_s1", "pal_s2",
        "pal_s3", "pal_s4", "pal_s5", "pal_t11"
    };
#endif

    static bool done_init = false;
    int i;

    if (done_init) {
        return;
    }
    done_init = true;

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    for (i = 0; i < 31; i++) {
        cpu_std_ir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                               offsetof(CPUAlphaState, ir[i]),
                                               greg_names[i]);
    }

    for (i = 0; i < 31; i++) {
        cpu_fir[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                            offsetof(CPUAlphaState, fir[i]),
                                            freg_names[i]);
    }

#ifndef CONFIG_USER_ONLY
    memcpy(cpu_pal_ir, cpu_std_ir, sizeof(cpu_pal_ir));
    for (i = 0; i < 8; i++) {
        int r = (i == 7 ? 25 : i + 8);
        cpu_pal_ir[r] = tcg_global_mem_new_i64(TCG_AREG0,
                                               offsetof(CPUAlphaState,
                                                        shadow[i]),
                                               shadow_names[i]);
    }
#endif

    for (i = 0; i < ARRAY_SIZE(vars); ++i) {
        const GlobalVar *v = &vars[i];
        *v->var = tcg_global_mem_new_i64(TCG_AREG0, v->ofs, v->name);
    }
}
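
/* Added note: the shadow loop above replaces ir8-ir14 and ir25 in the
   PAL-mode register map, i.e. t7, s0-s5 and t11, matching shadow_names.  */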

static TCGv load_zero(DisasContext *ctx)
{
    if (TCGV_IS_UNUSED_I64(ctx->zero)) {
        ctx->zero = tcg_const_i64(0);
    }
    return ctx->zero;
}

static TCGv dest_sink(DisasContext *ctx)
{
    if (TCGV_IS_UNUSED_I64(ctx->sink)) {
        ctx->sink = tcg_temp_new();
    }
    return ctx->sink;
}

static TCGv load_gpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return ctx->ir[reg];
    } else {
        return load_zero(ctx);
    }
}

static TCGv load_gpr_lit(DisasContext *ctx, unsigned reg,
                         uint8_t lit, bool islit)
{
    if (islit) {
        ctx->lit = tcg_const_i64(lit);
        return ctx->lit;
    } else if (likely(reg < 31)) {
        return ctx->ir[reg];
    } else {
        return load_zero(ctx);
    }
}
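
/* Added note: in the Alpha operate format, bit 12 selects an 8-bit
   zero-extended literal in place of Rb, so translate_one below calls this
   as load_gpr_lit(ctx, rb, extract32(insn, 13, 8), extract32(insn, 12, 1)).  */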

static TCGv dest_gpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return ctx->ir[reg];
    } else {
        return dest_sink(ctx);
    }
}

static TCGv load_fpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return cpu_fir[reg];
    } else {
        return load_zero(ctx);
    }
}

static TCGv dest_fpr(DisasContext *ctx, unsigned reg)
{
    if (likely(reg < 31)) {
        return cpu_fir[reg];
    } else {
        return dest_sink(ctx);
    }
}

static void gen_excp_1(int exception, int error_code)
{
    TCGv_i32 tmp1, tmp2;

    tmp1 = tcg_const_i32(exception);
    tmp2 = tcg_const_i32(error_code);
    gen_helper_excp(cpu_env, tmp1, tmp2);
    tcg_temp_free_i32(tmp2);
    tcg_temp_free_i32(tmp1);
}

static ExitStatus gen_excp(DisasContext *ctx, int exception, int error_code)
{
    tcg_gen_movi_i64(cpu_pc, ctx->pc);
    gen_excp_1(exception, error_code);
    return EXIT_NORETURN;
}

static inline ExitStatus gen_invalid(DisasContext *ctx)
{
    return gen_excp(ctx, EXCP_OPCDEC, 0);
}

static inline void gen_qemu_ldf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp32, t1, flags, MO_LEUL);
    gen_helper_memory_to_f(t0, tmp32);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_qemu_ldg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    tcg_gen_qemu_ld_i64(tmp, t1, flags, MO_LEQ);
    gen_helper_memory_to_g(t0, tmp);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_lds(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    tcg_gen_qemu_ld_i32(tmp32, t1, flags, MO_LEUL);
    gen_helper_memory_to_s(t0, tmp32);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_qemu_ldl_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld_i64(t0, t1, flags, MO_LESL);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}

static inline void gen_qemu_ldq_l(TCGv t0, TCGv t1, int flags)
{
    tcg_gen_qemu_ld_i64(t0, t1, flags, MO_LEQ);
    tcg_gen_mov_i64(cpu_lock_addr, t1);
    tcg_gen_mov_i64(cpu_lock_value, t0);
}

static inline void gen_load_mem(DisasContext *ctx,
                                void (*tcg_gen_qemu_load)(TCGv t0, TCGv t1,
                                                          int flags),
                                int ra, int rb, int32_t disp16, bool fp,
                                bool clear)
{
    TCGv tmp, addr, va;

    /* LDQ_U with ra $31 is UNOP.  Other various loads are forms of
       prefetches, which we can treat as nops.  No worries about
       missed exceptions here.  */
    if (unlikely(ra == 31)) {
        return;
    }

    tmp = tcg_temp_new();
    addr = load_gpr(ctx, rb);

    if (disp16) {
        tcg_gen_addi_i64(tmp, addr, disp16);
        addr = tmp;
    }
    if (clear) {
        tcg_gen_andi_i64(tmp, addr, ~0x7);
        addr = tmp;
    }

    va = (fp ? cpu_fir[ra] : ctx->ir[ra]);
    tcg_gen_qemu_load(va, addr, ctx->mem_idx);

    tcg_temp_free(tmp);
}

static inline void gen_qemu_stf(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    gen_helper_f_to_memory(tmp32, t0);
    tcg_gen_qemu_st_i32(tmp32, t1, flags, MO_LEUL);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_qemu_stg(TCGv t0, TCGv t1, int flags)
{
    TCGv tmp = tcg_temp_new();
    gen_helper_g_to_memory(tmp, t0);
    tcg_gen_qemu_st_i64(tmp, t1, flags, MO_LEQ);
    tcg_temp_free(tmp);
}

static inline void gen_qemu_sts(TCGv t0, TCGv t1, int flags)
{
    TCGv_i32 tmp32 = tcg_temp_new_i32();
    gen_helper_s_to_memory(tmp32, t0);
    tcg_gen_qemu_st_i32(tmp32, t1, flags, MO_LEUL);
    tcg_temp_free_i32(tmp32);
}

static inline void gen_store_mem(DisasContext *ctx,
                                 void (*tcg_gen_qemu_store)(TCGv t0, TCGv t1,
                                                            int flags),
                                 int ra, int rb, int32_t disp16, bool fp,
                                 bool clear)
{
    TCGv tmp, addr, va;

    tmp = tcg_temp_new();
    addr = load_gpr(ctx, rb);

    if (disp16) {
        tcg_gen_addi_i64(tmp, addr, disp16);
        addr = tmp;
    }
    if (clear) {
        tcg_gen_andi_i64(tmp, addr, ~0x7);
        addr = tmp;
    }

    va = (fp ? load_fpr(ctx, ra) : load_gpr(ctx, ra));
    tcg_gen_qemu_store(va, addr, ctx->mem_idx);

    tcg_temp_free(tmp);
}

static ExitStatus gen_store_conditional(DisasContext *ctx, int ra, int rb,
                                        int32_t disp16, int quad)
{
    TCGv addr;

    if (ra == 31) {
        /* ??? Don't bother storing anything.  The user can't tell
           the difference, since the zero register always reads zero.  */
        return NO_EXIT;
    }

#if defined(CONFIG_USER_ONLY)
    addr = cpu_lock_st_addr;
#else
    addr = tcg_temp_local_new();
#endif

    tcg_gen_addi_i64(addr, load_gpr(ctx, rb), disp16);

#if defined(CONFIG_USER_ONLY)
    /* ??? This is handled via a complicated version of compare-and-swap
       in the cpu_loop.  Hopefully one day we'll have a real CAS opcode
       in TCG so that this isn't necessary.  */
    return gen_excp(ctx, quad ? EXCP_STQ_C : EXCP_STL_C, ra);
#else
    /* ??? In system mode we are never multi-threaded, so CAS can be
       implemented via a non-atomic load-compare-store sequence.  */
    {
        TCGLabel *lab_fail, *lab_done;
        TCGv val;

        lab_fail = gen_new_label();
        lab_done = gen_new_label();
        tcg_gen_brcond_i64(TCG_COND_NE, addr, cpu_lock_addr, lab_fail);

        val = tcg_temp_new();
        tcg_gen_qemu_ld_i64(val, addr, ctx->mem_idx, quad ? MO_LEQ : MO_LESL);
        tcg_gen_brcond_i64(TCG_COND_NE, val, cpu_lock_value, lab_fail);

        tcg_gen_qemu_st_i64(ctx->ir[ra], addr, ctx->mem_idx,
                            quad ? MO_LEQ : MO_LEUL);
        tcg_gen_movi_i64(ctx->ir[ra], 1);
        tcg_gen_br(lab_done);

        gen_set_label(lab_fail);
        tcg_gen_movi_i64(ctx->ir[ra], 0);

        gen_set_label(lab_done);
        tcg_gen_movi_i64(cpu_lock_addr, -1);

        tcg_temp_free(addr);
        return NO_EXIT;
    }
#endif
}

static bool in_superpage(DisasContext *ctx, int64_t addr)
{
    return ((ctx->tb->flags & TB_FLAGS_USER_MODE) == 0
            && addr < 0
            && ((addr >> 41) & 3) == 2
            && addr >> TARGET_VIRT_ADDR_SPACE_BITS == addr >> 63);
}
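
/* Added note, assuming the usual 43-bit Alpha virtual address space: the
   test above accepts exactly the kernel superpage, i.e. properly
   sign-extended addresses with bits <42:41> == 2, which is the range
   0xfffffc0000000000 through 0xfffffdffffffffff.  */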

static bool use_goto_tb(DisasContext *ctx, uint64_t dest)
{
    /* Suppress goto_tb in the case of single-stepping and IO.  */
    if ((ctx->tb->cflags & CF_LAST_IO)
        || ctx->singlestep_enabled || singlestep) {
        return false;
    }
    /* If the destination is in the superpage, the page perms can't change.  */
    if (in_superpage(ctx, dest)) {
        return true;
    }
    /* Check for the dest on the same page as the start of the TB.  */
    return ((ctx->tb->pc ^ dest) & TARGET_PAGE_MASK) == 0;
}

static ExitStatus gen_bdirect(DisasContext *ctx, int ra, int32_t disp)
{
    uint64_t dest = ctx->pc + (disp << 2);

    if (ra != 31) {
        tcg_gen_movi_i64(ctx->ir[ra], ctx->pc);
    }

    /* Notice branch-to-next; used to initialize RA with the PC.  */
    if (disp == 0) {
        return 0;
    } else if (use_goto_tb(ctx, dest)) {
        tcg_gen_goto_tb(0);
        tcg_gen_movi_i64(cpu_pc, dest);
        tcg_gen_exit_tb((uintptr_t)ctx->tb);
        return EXIT_GOTO_TB;
    } else {
        tcg_gen_movi_i64(cpu_pc, dest);
        return EXIT_PC_UPDATED;
    }
}

static ExitStatus gen_bcond_internal(DisasContext *ctx, TCGCond cond,
                                     TCGv cmp, int32_t disp)
{
    uint64_t dest = ctx->pc + (disp << 2);
    TCGLabel *lab_true = gen_new_label();

    if (use_goto_tb(ctx, dest)) {
        tcg_gen_brcondi_i64(cond, cmp, 0, lab_true);

        tcg_gen_goto_tb(0);
        tcg_gen_movi_i64(cpu_pc, ctx->pc);
        tcg_gen_exit_tb((uintptr_t)ctx->tb);

        gen_set_label(lab_true);
        tcg_gen_goto_tb(1);
        tcg_gen_movi_i64(cpu_pc, dest);
        tcg_gen_exit_tb((uintptr_t)ctx->tb + 1);

        return EXIT_GOTO_TB;
    } else {
        TCGv_i64 z = tcg_const_i64(0);
        TCGv_i64 d = tcg_const_i64(dest);
        TCGv_i64 p = tcg_const_i64(ctx->pc);

        tcg_gen_movcond_i64(cond, cpu_pc, cmp, z, d, p);

        tcg_temp_free_i64(z);
        tcg_temp_free_i64(d);
        tcg_temp_free_i64(p);
        return EXIT_PC_UPDATED;
    }
}
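
/* Added note: in the non-goto_tb case the conditional branch is emitted
   branchlessly; the movcond above computes
   cpu_pc = (cmp <cond> 0) ? dest : ctx->pc (the fall-through address).  */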

static ExitStatus gen_bcond(DisasContext *ctx, TCGCond cond, int ra,
                            int32_t disp, int mask)
{
    TCGv cmp_tmp;

    if (mask) {
        cmp_tmp = tcg_temp_new();
        tcg_gen_andi_i64(cmp_tmp, load_gpr(ctx, ra), 1);
    } else {
        cmp_tmp = load_gpr(ctx, ra);
    }

    return gen_bcond_internal(ctx, cond, cmp_tmp, disp);
}

/* Fold -0.0 for comparison with COND.  */

static void gen_fold_mzero(TCGCond cond, TCGv dest, TCGv src)
{
    uint64_t mzero = 1ull << 63;

    switch (cond) {
    case TCG_COND_LE:
    case TCG_COND_GT:
        /* For <= or >, the -0.0 value directly compares the way we want.  */
        tcg_gen_mov_i64(dest, src);
        break;

    case TCG_COND_EQ:
    case TCG_COND_NE:
        /* For == or !=, we can simply mask off the sign bit and compare.  */
        tcg_gen_andi_i64(dest, src, mzero - 1);
        break;

    case TCG_COND_GE:
    case TCG_COND_LT:
        /* For >= or <, map -0.0 to +0.0 via comparison and mask.  */
        tcg_gen_setcondi_i64(TCG_COND_NE, dest, src, mzero);
        tcg_gen_neg_i64(dest, dest);
        tcg_gen_and_i64(dest, dest, src);
        break;

    default:
        abort();
    }
}
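
/* Added worked example for the >= / < case above: setcondi yields 1
   iff src != -0.0; negating turns that into an all-ones mask (or zero
   for -0.0 itself), so the final AND passes src through unchanged
   except that -0.0 collapses to +0.0.  */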

static ExitStatus gen_fbcond(DisasContext *ctx, TCGCond cond, int ra,
                             int32_t disp)
{
    TCGv cmp_tmp = tcg_temp_new();
    gen_fold_mzero(cond, cmp_tmp, load_fpr(ctx, ra));
    return gen_bcond_internal(ctx, cond, cmp_tmp, disp);
}

static void gen_fcmov(DisasContext *ctx, TCGCond cond, int ra, int rb, int rc)
{
    TCGv_i64 va, vb, z;

    z = load_zero(ctx);
    vb = load_fpr(ctx, rb);
    va = tcg_temp_new();
    gen_fold_mzero(cond, va, load_fpr(ctx, ra));

    tcg_gen_movcond_i64(cond, dest_fpr(ctx, rc), va, z, vb, load_fpr(ctx, rc));

    tcg_temp_free(va);
}

#define QUAL_RM_N       0x080   /* Round mode nearest even */
#define QUAL_RM_C       0x000   /* Round mode chopped */
#define QUAL_RM_M       0x040   /* Round mode minus infinity */
#define QUAL_RM_D       0x0c0   /* Round mode dynamic */
#define QUAL_RM_MASK    0x0c0

#define QUAL_U          0x100   /* Underflow enable (fp output) */
#define QUAL_V          0x100   /* Overflow enable (int output) */
#define QUAL_S          0x400   /* Software completion enable */
#define QUAL_I          0x200   /* Inexact detection enable */

static void gen_qual_roundmode(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_RM_MASK;
    if (fn11 == ctx->tb_rm) {
        return;
    }
    ctx->tb_rm = fn11;

    tmp = tcg_temp_new_i32();
    switch (fn11) {
    case QUAL_RM_N:
        tcg_gen_movi_i32(tmp, float_round_nearest_even);
        break;
    case QUAL_RM_C:
        tcg_gen_movi_i32(tmp, float_round_to_zero);
        break;
    case QUAL_RM_M:
        tcg_gen_movi_i32(tmp, float_round_down);
        break;
    case QUAL_RM_D:
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_dyn_round));
        break;
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    /* ??? The "fpu/softfloat.h" interface is to call set_float_rounding_mode.
       With CONFIG_SOFTFLOAT that expands to an out-of-line call that just
       sets the one field.  */
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.float_rounding_mode));
#else
    gen_helper_setroundmode(tmp);
#endif

    tcg_temp_free_i32(tmp);
}

static void gen_qual_flushzero(DisasContext *ctx, int fn11)
{
    TCGv_i32 tmp;

    fn11 &= QUAL_U;
    if (fn11 == ctx->tb_ftz) {
        return;
    }
    ctx->tb_ftz = fn11;

    tmp = tcg_temp_new_i32();
    if (fn11) {
        /* Underflow is enabled, use the FPCR setting.  */
        tcg_gen_ld8u_i32(tmp, cpu_env,
                         offsetof(CPUAlphaState, fpcr_flush_to_zero));
    } else {
        /* Underflow is disabled, force flush-to-zero.  */
        tcg_gen_movi_i32(tmp, 1);
    }

#if defined(CONFIG_SOFTFLOAT_INLINE)
    tcg_gen_st8_i32(tmp, cpu_env,
                    offsetof(CPUAlphaState, fp_status.flush_to_zero));
#else
    gen_helper_setflushzero(tmp);
#endif

    tcg_temp_free_i32(tmp);
}

static TCGv gen_ieee_input(DisasContext *ctx, int reg, int fn11, int is_cmp)
{
    TCGv val;

    if (unlikely(reg == 31)) {
        val = load_zero(ctx);
    } else {
        val = cpu_fir[reg];
        if ((fn11 & QUAL_S) == 0) {
            if (is_cmp) {
                gen_helper_ieee_input_cmp(cpu_env, val);
            } else {
                gen_helper_ieee_input(cpu_env, val);
            }
        } else {
#ifndef CONFIG_USER_ONLY
            /* In system mode, raise exceptions for denormals like real
               hardware.  In user mode, proceed as if the OS completion
               handler is handling the denormal as per spec.  */
            gen_helper_ieee_input_s(cpu_env, val);
#endif
        }
    }
    return val;
}

static void gen_fp_exc_raise(int rc, int fn11)
{
    /* ??? We ought to be able to do something with imprecise exceptions.
       E.g. notice we're still in the trap shadow of something within the
       TB and do not generate the code to signal the exception; end the TB
       when an exception is forced to arrive, either by consumption of a
       register value or TRAPB or EXCB.  */
    TCGv_i32 reg, ign;
    uint32_t ignore = 0;

    if (!(fn11 & QUAL_U)) {
        /* Note that QUAL_U == QUAL_V, so ignore either.  */
        ignore |= FPCR_UNF | FPCR_IOV;
    }
    if (!(fn11 & QUAL_I)) {
        ignore |= FPCR_INE;
    }
    ign = tcg_const_i32(ignore);

    /* ??? Pass in the regno of the destination so that the helper can
       set EXC_MASK, which contains a bitmask of destination registers
       that have caused arithmetic traps.  A simple userspace emulation
       does not require this.  We do need it for a guest kernel's entArith,
       or if we were to do something clever with imprecise exceptions.  */
    reg = tcg_const_i32(rc + 32);
    if (fn11 & QUAL_S) {
        gen_helper_fp_exc_raise_s(cpu_env, ign, reg);
    } else {
        gen_helper_fp_exc_raise(cpu_env, ign, reg);
    }

    tcg_temp_free_i32(reg);
    tcg_temp_free_i32(ign);
}

static void gen_cvtlq(TCGv vc, TCGv vb)
{
    TCGv tmp = tcg_temp_new();

    /* The arithmetic right shift here, plus the sign-extended mask below
       yields a sign-extended result without an explicit ext32s_i64.  */
    tcg_gen_sari_i64(tmp, vb, 32);
    tcg_gen_shri_i64(vc, vb, 29);
    tcg_gen_andi_i64(tmp, tmp, (int32_t)0xc0000000);
    tcg_gen_andi_i64(vc, vc, 0x3fffffff);
    tcg_gen_or_i64(vc, vc, tmp);

    tcg_temp_free(tmp);
}
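
/* Added note: the sequence above rearranges the in-register longword
   image back into a canonical quadword, taking result bits <31:30> from
   input bits <63:62> and bits <29:0> from input bits <58:29>, with bits
   <63:32> filled by sign extension.  */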

static void gen_ieee_arith2(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    vb = gen_ieee_input(ctx, rb, fn11, 0);
    helper(dest_fpr(ctx, rc), cpu_env, vb);

    gen_fp_exc_raise(rc, fn11);
}

#define IEEE_ARITH2(name)                                       \
static inline void glue(gen_, name)(DisasContext *ctx,          \
                                    int rb, int rc, int fn11)   \
{                                                               \
    gen_ieee_arith2(ctx, gen_helper_##name, rb, rc, fn11);      \
}
IEEE_ARITH2(sqrts)
IEEE_ARITH2(sqrtt)
IEEE_ARITH2(cvtst)
IEEE_ARITH2(cvtts)

static void gen_cvttq(DisasContext *ctx, int rb, int rc, int fn11)
{
    TCGv vb, vc;

    /* No need to set flushzero, since we have an integer output.  */
    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);

    /* Almost all integer conversions use cropped rounding;
       special case that.  */
    if ((fn11 & QUAL_RM_MASK) == QUAL_RM_C) {
        gen_helper_cvttq_c(vc, cpu_env, vb);
    } else {
        gen_qual_roundmode(ctx, fn11);
        gen_helper_cvttq(vc, cpu_env, vb);
    }
    gen_fp_exc_raise(rc, fn11);
}

static void gen_ieee_intcvt(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv),
                            int rb, int rc, int fn11)
{
    TCGv vb, vc;

    gen_qual_roundmode(ctx, fn11);
    vb = load_fpr(ctx, rb);
    vc = dest_fpr(ctx, rc);

    /* The only exception that can be raised by integer conversion
       is inexact.  Thus we only need to worry about exceptions when
       inexact handling is requested.  */
    if (fn11 & QUAL_I) {
        helper(vc, cpu_env, vb);
        gen_fp_exc_raise(rc, fn11);
    } else {
        helper(vc, cpu_env, vb);
    }
}

#define IEEE_INTCVT(name)                                       \
static inline void glue(gen_, name)(DisasContext *ctx,          \
                                    int rb, int rc, int fn11)   \
{                                                               \
    gen_ieee_intcvt(ctx, gen_helper_##name, rb, rc, fn11);      \
}
IEEE_INTCVT(cvtqs)
IEEE_INTCVT(cvtqt)

static void gen_cpy_mask(TCGv vc, TCGv va, TCGv vb, bool inv_a, uint64_t mask)
{
    TCGv vmask = tcg_const_i64(mask);
    TCGv tmp = tcg_temp_new_i64();

    if (inv_a) {
        tcg_gen_andc_i64(tmp, vmask, va);
    } else {
        tcg_gen_and_i64(tmp, va, vmask);
    }

    tcg_gen_andc_i64(vc, vb, vmask);
    tcg_gen_or_i64(vc, vc, tmp);

    tcg_temp_free(vmask);
    tcg_temp_free(tmp);
}

static void gen_ieee_arith3(DisasContext *ctx,
                            void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                            int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    gen_qual_roundmode(ctx, fn11);
    gen_qual_flushzero(ctx, fn11);

    va = gen_ieee_input(ctx, ra, fn11, 0);
    vb = gen_ieee_input(ctx, rb, fn11, 0);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}

#define IEEE_ARITH3(name)                                               \
static inline void glue(gen_, name)(DisasContext *ctx,                  \
                                    int ra, int rb, int rc, int fn11)   \
{                                                                       \
    gen_ieee_arith3(ctx, gen_helper_##name, ra, rb, rc, fn11);          \
}
IEEE_ARITH3(adds)
IEEE_ARITH3(subs)
IEEE_ARITH3(muls)
IEEE_ARITH3(divs)
IEEE_ARITH3(addt)
IEEE_ARITH3(subt)
IEEE_ARITH3(mult)
IEEE_ARITH3(divt)

static void gen_ieee_compare(DisasContext *ctx,
                             void (*helper)(TCGv, TCGv_ptr, TCGv, TCGv),
                             int ra, int rb, int rc, int fn11)
{
    TCGv va, vb, vc;

    va = gen_ieee_input(ctx, ra, fn11, 1);
    vb = gen_ieee_input(ctx, rb, fn11, 1);
    vc = dest_fpr(ctx, rc);
    helper(vc, cpu_env, va, vb);

    gen_fp_exc_raise(rc, fn11);
}

#define IEEE_CMP3(name)                                                 \
static inline void glue(gen_, name)(DisasContext *ctx,                  \
                                    int ra, int rb, int rc, int fn11)   \
{                                                                       \
    gen_ieee_compare(ctx, gen_helper_##name, ra, rb, rc, fn11);         \
}
IEEE_CMP3(cmptun)
IEEE_CMP3(cmpteq)
IEEE_CMP3(cmptlt)
IEEE_CMP3(cmptle)

static inline uint64_t zapnot_mask(uint8_t lit)
{
    uint64_t mask = 0;
    int i;

    for (i = 0; i < 8; ++i) {
        if ((lit >> i) & 1) {
            mask |= 0xffull << (i * 8);
        }
    }
    return mask;
}
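
/* Added example: each set bit of LIT selects one byte lane, so
   zapnot_mask(0x01) == 0x00000000000000ff and
   zapnot_mask(0x0f) == 0x00000000ffffffff, matching the ext8u/ext32u
   special cases in gen_zapnoti below.  */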

/* Implement zapnot with an immediate operand, which expands to some
   form of immediate AND.  This is a basic building block in the
   definition of many of the other byte manipulation instructions.  */
static void gen_zapnoti(TCGv dest, TCGv src, uint8_t lit)
{
    switch (lit) {
    case 0x00:
        tcg_gen_movi_i64(dest, 0);
        break;
    case 0x01:
        tcg_gen_ext8u_i64(dest, src);
        break;
    case 0x03:
        tcg_gen_ext16u_i64(dest, src);
        break;
    case 0x0f:
        tcg_gen_ext32u_i64(dest, src);
        break;
    case 0xff:
        tcg_gen_mov_i64(dest, src);
        break;
    default:
        tcg_gen_andi_i64(dest, src, zapnot_mask(lit));
        break;
    }
}

/* EXTWH, EXTLH, EXTQH */
static void gen_ext_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        tcg_gen_shli_i64(vc, va, (64 - lit * 8) & 0x3f);
    } else {
        TCGv tmp = tcg_temp_new();
        tcg_gen_shli_i64(tmp, load_gpr(ctx, rb), 3);
        tcg_gen_neg_i64(tmp, tmp);
        tcg_gen_andi_i64(tmp, tmp, 0x3f);
        tcg_gen_shl_i64(vc, va, tmp);
        tcg_temp_free(tmp);
    }
    gen_zapnoti(vc, vc, byte_mask);
}

/* EXTBL, EXTWL, EXTLL, EXTQL */
static void gen_ext_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        tcg_gen_shri_i64(vc, va, (lit & 7) * 8);
    } else {
        TCGv tmp = tcg_temp_new();
        tcg_gen_andi_i64(tmp, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(tmp, tmp, 3);
        tcg_gen_shr_i64(vc, va, tmp);
        tcg_temp_free(tmp);
    }
    gen_zapnoti(vc, vc, byte_mask);
}

/* INSWH, INSLH, INSQH */
static void gen_ins_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    TCGv tmp = tcg_temp_new();

    /* The instruction description has us left-shift the byte mask and extract
       bits <15:8> and apply that zap at the end.  This is equivalent to simply
       performing the zap first and shifting afterward.  */
    gen_zapnoti(tmp, va, byte_mask);

    if (islit) {
        lit &= 7;
        if (unlikely(lit == 0)) {
            tcg_gen_movi_i64(vc, 0);
        } else {
            tcg_gen_shri_i64(vc, tmp, 64 - lit * 8);
        }
    } else {
        TCGv shift = tcg_temp_new();

        /* If (B & 7) == 0, we need to shift by 64 and leave a zero.  Do this
           portably by splitting the shift into two parts: shift_count-1 and 1.
           Arrange for the -1 by using ones-complement instead of
           twos-complement in the negation: ~(B * 8) & 63.  */
        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);

        tcg_gen_shr_i64(vc, tmp, shift);
        tcg_gen_shri_i64(vc, vc, 1);
        tcg_temp_free(shift);
    }
    tcg_temp_free(tmp);
}
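
/* Added worked example of the split shift above: for (B & 7) == 2 the
   required right shift is 64 - 16 = 48, computed as ~(2 * 8) & 63 = 47
   followed by one more shift of 1; for (B & 7) == 0 it becomes
   63 + 1 = 64, correctly producing zero.  */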

/* INSBL, INSWL, INSLL, INSQL */
static void gen_ins_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    TCGv tmp = tcg_temp_new();

    /* The instruction description has us left-shift the byte mask
       the same number of byte slots as the data and apply the zap
       at the end.  This is equivalent to simply performing the zap
       first and shifting afterward.  */
    gen_zapnoti(tmp, va, byte_mask);

    if (islit) {
        tcg_gen_shli_i64(vc, tmp, (lit & 7) * 8);
    } else {
        TCGv shift = tcg_temp_new();
        tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(shift, shift, 3);
        tcg_gen_shl_i64(vc, tmp, shift);
        tcg_temp_free(shift);
    }
    tcg_temp_free(tmp);
}

/* MSKWH, MSKLH, MSKQH */
static void gen_msk_h(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        gen_zapnoti(vc, va, ~((byte_mask << (lit & 7)) >> 8));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        /* The instruction description is as above, where the byte_mask
           is shifted left, and then we extract bits <15:8>.  This can be
           emulated with a right-shift on the expanded byte mask.  This
           requires extra care because for an input <2:0> == 0 we need a
           shift of 64 bits in order to generate a zero.  This is done by
           splitting the shift into two parts, the variable shift - 1
           followed by a constant 1 shift.  The code we expand below is
           equivalent to ~(B * 8) & 63.  */
        tcg_gen_shli_i64(shift, load_gpr(ctx, rb), 3);
        tcg_gen_not_i64(shift, shift);
        tcg_gen_andi_i64(shift, shift, 0x3f);

        tcg_gen_movi_i64(mask, zapnot_mask(byte_mask));
        tcg_gen_shr_i64(mask, mask, shift);
        tcg_gen_shri_i64(mask, mask, 1);

        tcg_gen_andc_i64(vc, va, mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}

/* MSKBL, MSKWL, MSKLL, MSKQL */
static void gen_msk_l(DisasContext *ctx, TCGv vc, TCGv va, int rb, bool islit,
                      uint8_t lit, uint8_t byte_mask)
{
    if (islit) {
        gen_zapnoti(vc, va, ~(byte_mask << (lit & 7)));
    } else {
        TCGv shift = tcg_temp_new();
        TCGv mask = tcg_temp_new();

        tcg_gen_andi_i64(shift, load_gpr(ctx, rb), 7);
        tcg_gen_shli_i64(shift, shift, 3);

        tcg_gen_movi_i64(mask, zapnot_mask(byte_mask));
        tcg_gen_shl_i64(mask, mask, shift);

        tcg_gen_andc_i64(vc, va, mask);

        tcg_temp_free(mask);
        tcg_temp_free(shift);
    }
}

static void gen_rx(DisasContext *ctx, int ra, int set)
{
    TCGv_i32 tmp;

    if (ra != 31) {
        tcg_gen_ld8u_i64(ctx->ir[ra], cpu_env,
                         offsetof(CPUAlphaState, intr_flag));
    }

    tmp = tcg_const_i32(set);
    tcg_gen_st8_i32(tmp, cpu_env, offsetof(CPUAlphaState, intr_flag));
    tcg_temp_free_i32(tmp);
}

static ExitStatus gen_call_pal(DisasContext *ctx, int palcode)
{
    /* We're emulating OSF/1 PALcode.  Many of these are trivial access
       to internal cpu registers.  */

    /* Unprivileged PAL call */
    if (palcode >= 0x80 && palcode < 0xC0) {
        switch (palcode) {
        case 0x86:
            /* IMB */
            /* No-op inside QEMU.  */
            break;
        case 0x9E:
            /* RDUNIQUE */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        case 0x9F:
            /* WRUNIQUE */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, unique));
            break;
        default:
            palcode &= 0xbf;
            goto do_call_pal;
        }
        return NO_EXIT;
    }

#ifndef CONFIG_USER_ONLY
    /* Privileged PAL code */
    if (palcode < 0x40 && (ctx->tb->flags & TB_FLAGS_USER_MODE) == 0) {
        switch (palcode) {
        case 0x01:
            /* CFLUSH */
            /* No-op inside QEMU.  */
            break;
        case 0x02:
            /* DRAINA */
            /* No-op inside QEMU.  */
            break;
        case 0x2D:
            /* WRVPTPTR */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, vptptr));
            break;
        case 0x31:
            /* WRVAL */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;
        case 0x32:
            /* RDVAL */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, sysval));
            break;

        case 0x35: {
            /* SWPIPL */
            TCGv tmp;

            /* Note that we already know we're in kernel mode, so we know
               that PS only contains the 3 IPL bits.  */
            tcg_gen_ld8u_i64(ctx->ir[IR_V0], cpu_env,
                             offsetof(CPUAlphaState, ps));

            /* But make sure and store only the 3 IPL bits from the user.  */
            tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, ctx->ir[IR_A0], PS_INT_MASK);
            tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, ps));
            tcg_temp_free(tmp);
            break;
        }

        case 0x36:
            /* RDPS */
            tcg_gen_ld8u_i64(ctx->ir[IR_V0], cpu_env,
                             offsetof(CPUAlphaState, ps));
            break;
        case 0x38:
            /* WRUSP */
            tcg_gen_st_i64(ctx->ir[IR_A0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3A:
            /* RDUSP */
            tcg_gen_ld_i64(ctx->ir[IR_V0], cpu_env,
                           offsetof(CPUAlphaState, usp));
            break;
        case 0x3C:
            /* WHAMI */
            tcg_gen_ld32s_i64(ctx->ir[IR_V0], cpu_env,
                -offsetof(AlphaCPU, env) + offsetof(CPUState, cpu_index));
            break;

        default:
            palcode &= 0x3f;
            goto do_call_pal;
        }
        return NO_EXIT;
    }
#endif
    return gen_invalid(ctx);

 do_call_pal:
#ifdef CONFIG_USER_ONLY
    return gen_excp(ctx, EXCP_CALL_PAL, palcode);
#else
    {
        TCGv tmp = tcg_temp_new();
        uint64_t exc_addr = ctx->pc;
        uint64_t entry = ctx->palbr;

        if (ctx->tb->flags & TB_FLAGS_PAL_MODE) {
            exc_addr |= 1;
        } else {
            tcg_gen_movi_i64(tmp, 1);
            tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, pal_mode));
        }

        tcg_gen_movi_i64(tmp, exc_addr);
        tcg_gen_st_i64(tmp, cpu_env, offsetof(CPUAlphaState, exc_addr));
        tcg_temp_free(tmp);
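
        /* Added note: with the OSF/1 PALcode layout used below,
           unprivileged entry points start at palbr + 0x2000 and
           privileged ones at palbr + 0x1000, spaced 64 bytes apart; e.g.
           CALL_PAL 0x83 (callsys) would enter at palbr + 0x20c0.  */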
        entry += (palcode & 0x80
                  ? 0x2000 + (palcode - 0x80) * 64
                  : 0x1000 + palcode * 64);

        /* Since the destination is running in PALmode, we don't really
           need the page permissions check.  We'll see the existence of
           the page when we create the TB, and we'll flush all TBs if
           we change the PAL base register.  */
        if (!ctx->singlestep_enabled && !(ctx->tb->cflags & CF_LAST_IO)) {
            tcg_gen_goto_tb(0);
            tcg_gen_movi_i64(cpu_pc, entry);
            tcg_gen_exit_tb((uintptr_t)ctx->tb);
            return EXIT_GOTO_TB;
        } else {
            tcg_gen_movi_i64(cpu_pc, entry);
            return EXIT_PC_UPDATED;
        }
    }
#endif
}

#ifndef CONFIG_USER_ONLY

#define PR_BYTE         0x100000
#define PR_LONG         0x200000

static int cpu_pr_data(int pr)
{
    switch (pr) {
    case  0: return offsetof(CPUAlphaState, ps) | PR_BYTE;
    case  1: return offsetof(CPUAlphaState, fen) | PR_BYTE;
    case  2: return offsetof(CPUAlphaState, pcc_ofs) | PR_LONG;
    case  3: return offsetof(CPUAlphaState, trap_arg0);
    case  4: return offsetof(CPUAlphaState, trap_arg1);
    case  5: return offsetof(CPUAlphaState, trap_arg2);
    case  6: return offsetof(CPUAlphaState, exc_addr);
    case  7: return offsetof(CPUAlphaState, palbr);
    case  8: return offsetof(CPUAlphaState, ptbr);
    case  9: return offsetof(CPUAlphaState, vptptr);
    case 10: return offsetof(CPUAlphaState, unique);
    case 11: return offsetof(CPUAlphaState, sysval);
    case 12: return offsetof(CPUAlphaState, usp);

    case 40 ... 63:
        return offsetof(CPUAlphaState, scratch[pr - 40]);

    case 251:
        return offsetof(CPUAlphaState, alarm_expire);
    }
    return 0;
}

static ExitStatus gen_mfpr(DisasContext *ctx, TCGv va, int regno)
{
    void (*helper)(TCGv);
    int data;

    switch (regno) {
    case 32 ... 39:
        /* Accessing the "non-shadow" general registers.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(va, cpu_std_ir[regno]);
        break;

    case 250: /* WALLTIME */
        helper = gen_helper_get_walltime;
        goto do_helper;
    case 249: /* VMTIME */
        helper = gen_helper_get_vmtime;
    do_helper:
        if (use_icount) {
            gen_io_start();
            helper(va);
            gen_io_end();
            return EXIT_PC_STALE;
        } else {
            helper(va);
        }
        break;

    default:
        /* The basic registers are data only, and unknown registers
           are read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data == 0) {
            tcg_gen_movi_i64(va, 0);
        } else if (data & PR_BYTE) {
            tcg_gen_ld8u_i64(va, cpu_env, data & ~PR_BYTE);
        } else if (data & PR_LONG) {
            tcg_gen_ld32s_i64(va, cpu_env, data & ~PR_LONG);
        } else {
            tcg_gen_ld_i64(va, cpu_env, data);
        }
        break;
    }

    return NO_EXIT;
}

static ExitStatus gen_mtpr(DisasContext *ctx, TCGv vb, int regno)
{
    TCGv tmp;
    int data;

    switch (regno) {
    case 255:
        /* TBIA */
        gen_helper_tbia(cpu_env);
        break;

    case 254:
        /* TBIS */
        gen_helper_tbis(cpu_env, vb);
        break;

    case 253:
        /* WAIT */
        tmp = tcg_const_i64(1);
        tcg_gen_st32_i64(tmp, cpu_env, -offsetof(AlphaCPU, env) +
                                       offsetof(CPUState, halted));
        return gen_excp(ctx, EXCP_HLT, 0);

    case 252:
        /* HALT */
        gen_helper_halt(vb);
        return EXIT_PC_STALE;

    case 251:
        /* ALARM */
        gen_helper_set_alarm(cpu_env, vb);
        break;

    case 7:
        /* PALBR */
        tcg_gen_st_i64(vb, cpu_env, offsetof(CPUAlphaState, palbr));
        /* Changing the PAL base register implies un-chaining all of the TBs
           that ended with a CALL_PAL.  Since the base register usually only
           changes during boot, flushing everything works well.  */
        gen_helper_tb_flush(cpu_env);
        return EXIT_PC_STALE;

    case 32 ... 39:
        /* Accessing the "non-shadow" general registers.  */
        regno = regno == 39 ? 25 : regno - 32 + 8;
        tcg_gen_mov_i64(cpu_std_ir[regno], vb);
        break;

    default:
        /* The basic registers are data only, and unknown registers
           are read-zero, write-ignore.  */
        data = cpu_pr_data(regno);
        if (data != 0) {
            if (data & PR_BYTE) {
                tcg_gen_st8_i64(vb, cpu_env, data & ~PR_BYTE);
            } else if (data & PR_LONG) {
                tcg_gen_st32_i64(vb, cpu_env, data & ~PR_LONG);
            } else {
                tcg_gen_st_i64(vb, cpu_env, data);
            }
        }
        break;
    }

    return NO_EXIT;
}
#endif /* !USER_ONLY */

#define REQUIRE_NO_LIT                          \
    do {                                        \
        if (real_islit) {                       \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

#define REQUIRE_TB_FLAG(FLAG)                   \
    do {                                        \
        if ((ctx->tb->flags & (FLAG)) == 0) {   \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

#define REQUIRE_REG_31(WHICH)                   \
    do {                                        \
        if (WHICH != 31) {                      \
            goto invalid_opc;                   \
        }                                       \
    } while (0)

static ExitStatus translate_one(DisasContext *ctx, uint32_t insn)
{
    int32_t disp21, disp16, disp12 __attribute__((unused));
    uint16_t fn11;
    uint8_t opc, ra, rb, rc, fpfn, fn7, lit;
    bool islit, real_islit;
    TCGv va, vb, vc, tmp, tmp2;
    TCGv_i32 t32;
    ExitStatus ret;

    /* Decode all instruction fields */
    opc = extract32(insn, 26, 6);
    ra = extract32(insn, 21, 5);
    rb = extract32(insn, 16, 5);
    rc = extract32(insn, 0, 5);
    real_islit = islit = extract32(insn, 12, 1);
    lit = extract32(insn, 13, 8);

    disp21 = sextract32(insn, 0, 21);
    disp16 = sextract32(insn, 0, 16);
    disp12 = sextract32(insn, 0, 12);

    fn11 = extract32(insn, 5, 11);
    fpfn = extract32(insn, 5, 6);
    fn7 = extract32(insn, 5, 7);
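
    /* Added layout note: in the operate format these fields overlay as
       opc<31:26> ra<25:21> rb<20:16> islit<12> fn7<11:5> rc<4:0>, with
       lit<20:13> replacing rb when islit is set; fn11<15:5> serves the
       FP formats, disp21<20:0> the branch format, and disp16<15:0> the
       memory format.  */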
  1229. if (rb == 31 && !islit) {
  1230. islit = true;
  1231. lit = 0;
  1232. }
  1233. ret = NO_EXIT;
  1234. switch (opc) {
  1235. case 0x00:
  1236. /* CALL_PAL */
  1237. ret = gen_call_pal(ctx, insn & 0x03ffffff);
  1238. break;
  1239. case 0x01:
  1240. /* OPC01 */
  1241. goto invalid_opc;
  1242. case 0x02:
  1243. /* OPC02 */
  1244. goto invalid_opc;
  1245. case 0x03:
  1246. /* OPC03 */
  1247. goto invalid_opc;
  1248. case 0x04:
  1249. /* OPC04 */
  1250. goto invalid_opc;
  1251. case 0x05:
  1252. /* OPC05 */
  1253. goto invalid_opc;
  1254. case 0x06:
  1255. /* OPC06 */
  1256. goto invalid_opc;
  1257. case 0x07:
  1258. /* OPC07 */
  1259. goto invalid_opc;
  1260. case 0x09:
  1261. /* LDAH */
  1262. disp16 = (uint32_t)disp16 << 16;
  1263. /* fall through */
  1264. case 0x08:
  1265. /* LDA */
  1266. va = dest_gpr(ctx, ra);
  1267. /* It's worth special-casing immediate loads. */
  1268. if (rb == 31) {
  1269. tcg_gen_movi_i64(va, disp16);
  1270. } else {
  1271. tcg_gen_addi_i64(va, load_gpr(ctx, rb), disp16);
  1272. }
  1273. break;
  1274. case 0x0A:
  1275. /* LDBU */
  1276. REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
  1277. gen_load_mem(ctx, &tcg_gen_qemu_ld8u, ra, rb, disp16, 0, 0);
  1278. break;
  1279. case 0x0B:
  1280. /* LDQ_U */
  1281. gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 1);
  1282. break;
  1283. case 0x0C:
  1284. /* LDWU */
  1285. REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
  1286. gen_load_mem(ctx, &tcg_gen_qemu_ld16u, ra, rb, disp16, 0, 0);
  1287. break;
  1288. case 0x0D:
  1289. /* STW */
  1290. REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
  1291. gen_store_mem(ctx, &tcg_gen_qemu_st16, ra, rb, disp16, 0, 0);
  1292. break;
  1293. case 0x0E:
  1294. /* STB */
  1295. REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
  1296. gen_store_mem(ctx, &tcg_gen_qemu_st8, ra, rb, disp16, 0, 0);
  1297. break;
  1298. case 0x0F:
  1299. /* STQ_U */
  1300. gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 1);
  1301. break;
  1302. case 0x10:
  1303. vc = dest_gpr(ctx, rc);
  1304. vb = load_gpr_lit(ctx, rb, lit, islit);
  1305. if (ra == 31) {
  1306. if (fn7 == 0x00) {
  1307. /* Special case ADDL as SEXTL. */
  1308. tcg_gen_ext32s_i64(vc, vb);
  1309. break;
  1310. }
  1311. if (fn7 == 0x29) {
  1312. /* Special case SUBQ as NEGQ. */
  1313. tcg_gen_neg_i64(vc, vb);
  1314. break;
  1315. }
  1316. }
  1317. va = load_gpr(ctx, ra);
  1318. switch (fn7) {
  1319. case 0x00:
  1320. /* ADDL */
  1321. tcg_gen_add_i64(vc, va, vb);
  1322. tcg_gen_ext32s_i64(vc, vc);
  1323. break;
  1324. case 0x02:
  1325. /* S4ADDL */
  1326. tmp = tcg_temp_new();
  1327. tcg_gen_shli_i64(tmp, va, 2);
  1328. tcg_gen_add_i64(tmp, tmp, vb);
  1329. tcg_gen_ext32s_i64(vc, tmp);
  1330. tcg_temp_free(tmp);
  1331. break;
  1332. case 0x09:
  1333. /* SUBL */
  1334. tcg_gen_sub_i64(vc, va, vb);
  1335. tcg_gen_ext32s_i64(vc, vc);
  1336. break;
  1337. case 0x0B:
  1338. /* S4SUBL */
  1339. tmp = tcg_temp_new();
  1340. tcg_gen_shli_i64(tmp, va, 2);
  1341. tcg_gen_sub_i64(tmp, tmp, vb);
  1342. tcg_gen_ext32s_i64(vc, tmp);
  1343. tcg_temp_free(tmp);
  1344. break;
  1345. case 0x0F:
  1346. /* CMPBGE */
            if (ra == 31) {
                /* Special case 0 >= X as X == 0. */
                gen_helper_cmpbe0(vc, vb);
            } else {
                gen_helper_cmpbge(vc, va, vb);
            }
            break;
        case 0x12:
            /* S8ADDL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_add_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x1B:
            /* S8SUBL */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_sub_i64(tmp, tmp, vb);
            tcg_gen_ext32s_i64(vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x1D:
            /* CMPULT */
            tcg_gen_setcond_i64(TCG_COND_LTU, vc, va, vb);
            break;
        case 0x20:
            /* ADDQ */
            tcg_gen_add_i64(vc, va, vb);
            break;
        case 0x22:
            /* S4ADDQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_add_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x29:
            /* SUBQ */
            tcg_gen_sub_i64(vc, va, vb);
            break;
        case 0x2B:
            /* S4SUBQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 2);
            tcg_gen_sub_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x2D:
            /* CMPEQ */
            tcg_gen_setcond_i64(TCG_COND_EQ, vc, va, vb);
            break;
        case 0x32:
            /* S8ADDQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_add_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x3B:
            /* S8SUBQ */
            tmp = tcg_temp_new();
            tcg_gen_shli_i64(tmp, va, 3);
            tcg_gen_sub_i64(vc, tmp, vb);
            tcg_temp_free(tmp);
            break;
        case 0x3D:
            /* CMPULE */
            tcg_gen_setcond_i64(TCG_COND_LEU, vc, va, vb);
            break;
        case 0x40:
            /* ADDL/V */
            tmp = tcg_temp_new();
            tcg_gen_ext32s_i64(tmp, va);
            tcg_gen_ext32s_i64(vc, vb);
            tcg_gen_add_i64(tmp, tmp, vc);
            tcg_gen_ext32s_i64(vc, tmp);
            gen_helper_check_overflow(cpu_env, vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x49:
            /* SUBL/V */
            tmp = tcg_temp_new();
            tcg_gen_ext32s_i64(tmp, va);
            tcg_gen_ext32s_i64(vc, vb);
            tcg_gen_sub_i64(tmp, tmp, vc);
            tcg_gen_ext32s_i64(vc, tmp);
            gen_helper_check_overflow(cpu_env, vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x4D:
            /* CMPLT */
            tcg_gen_setcond_i64(TCG_COND_LT, vc, va, vb);
            break;
        case 0x60:
            /* ADDQ/V */
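            /* Signed overflow occurred iff the operands agree in sign
               (EQV of bit 63 set) and the sum's sign differs from Ra's
               (XOR of bit 63 set); the helper traps when the extracted
               flag bit differs from the zero placed in tmp2.  */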
            tmp = tcg_temp_new();
            tmp2 = tcg_temp_new();
            tcg_gen_eqv_i64(tmp, va, vb);
            tcg_gen_mov_i64(tmp2, va);
            tcg_gen_add_i64(vc, va, vb);
            tcg_gen_xor_i64(tmp2, tmp2, vc);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            tcg_gen_shri_i64(tmp, tmp, 63);
            tcg_gen_movi_i64(tmp2, 0);
            gen_helper_check_overflow(cpu_env, tmp, tmp2);
            tcg_temp_free(tmp);
            tcg_temp_free(tmp2);
            break;
        case 0x69:
            /* SUBQ/V */
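            /* For subtraction the test flips: overflow occurred iff the
               operands differ in sign (XOR of bit 63 set) and the
               difference's sign differs from Ra's.  */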
            tmp = tcg_temp_new();
            tmp2 = tcg_temp_new();
            tcg_gen_xor_i64(tmp, va, vb);
            tcg_gen_mov_i64(tmp2, va);
            tcg_gen_sub_i64(vc, va, vb);
            tcg_gen_xor_i64(tmp2, tmp2, vc);
            tcg_gen_and_i64(tmp, tmp, tmp2);
            tcg_gen_shri_i64(tmp, tmp, 63);
            tcg_gen_movi_i64(tmp2, 0);
            gen_helper_check_overflow(cpu_env, tmp, tmp2);
            tcg_temp_free(tmp);
            tcg_temp_free(tmp2);
            break;
        case 0x6D:
            /* CMPLE */
            tcg_gen_setcond_i64(TCG_COND_LE, vc, va, vb);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x11:
        if (fn7 == 0x20) {
            if (rc == 31) {
                /* Special case BIS as NOP. */
                break;
            }
            if (ra == 31) {
                /* Special case BIS as MOV. */
                vc = dest_gpr(ctx, rc);
                if (islit) {
                    tcg_gen_movi_i64(vc, lit);
                } else {
                    tcg_gen_mov_i64(vc, load_gpr(ctx, rb));
                }
                break;
            }
        }
        vc = dest_gpr(ctx, rc);
        vb = load_gpr_lit(ctx, rb, lit, islit);
        if (fn7 == 0x28 && ra == 31) {
            /* Special case ORNOT as NOT. */
            tcg_gen_not_i64(vc, vb);
            break;
        }
        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x00:
            /* AND */
            tcg_gen_and_i64(vc, va, vb);
            break;
        case 0x08:
            /* BIC */
            tcg_gen_andc_i64(vc, va, vb);
            break;
        case 0x14:
            /* CMOVLBS */
            tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, va, 1);
            tcg_gen_movcond_i64(TCG_COND_NE, vc, tmp, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            tcg_temp_free(tmp);
            break;
        case 0x16:
            /* CMOVLBC */
            tmp = tcg_temp_new();
            tcg_gen_andi_i64(tmp, va, 1);
            tcg_gen_movcond_i64(TCG_COND_EQ, vc, tmp, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            tcg_temp_free(tmp);
            break;
        case 0x20:
            /* BIS */
            tcg_gen_or_i64(vc, va, vb);
            break;
        case 0x24:
            /* CMOVEQ */
            tcg_gen_movcond_i64(TCG_COND_EQ, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x26:
            /* CMOVNE */
            tcg_gen_movcond_i64(TCG_COND_NE, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x28:
            /* ORNOT */
            tcg_gen_orc_i64(vc, va, vb);
            break;
        case 0x40:
            /* XOR */
            tcg_gen_xor_i64(vc, va, vb);
            break;
        case 0x44:
            /* CMOVLT */
            tcg_gen_movcond_i64(TCG_COND_LT, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x46:
            /* CMOVGE */
            tcg_gen_movcond_i64(TCG_COND_GE, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x48:
            /* EQV */
            tcg_gen_eqv_i64(vc, va, vb);
            break;
        case 0x61:
            /* AMASK */
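            /* AMASK clears the Rb bits that name architecture extensions
               this implementation provides; the implemented-feature mask
               is carried in the TB flags.  */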
            REQUIRE_REG_31(ra);
            {
                uint64_t amask = ctx->tb->flags >> TB_FLAGS_AMASK_SHIFT;
                tcg_gen_andi_i64(vc, vb, ~amask);
            }
            break;
        case 0x64:
            /* CMOVLE */
            tcg_gen_movcond_i64(TCG_COND_LE, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x66:
            /* CMOVGT */
            tcg_gen_movcond_i64(TCG_COND_GT, vc, va, load_zero(ctx),
                                vb, load_gpr(ctx, rc));
            break;
        case 0x6C:
            /* IMPLVER */
            REQUIRE_REG_31(ra);
            tcg_gen_movi_i64(vc, ctx->implver);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x12:
        vc = dest_gpr(ctx, rc);
        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x02:
            /* MSKBL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0x01);
            break;
        case 0x06:
            /* EXTBL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0x01);
            break;
        case 0x0B:
            /* INSBL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0x01);
            break;
        case 0x12:
            /* MSKWL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x16:
            /* EXTWL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x1B:
            /* INSWL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x22:
            /* MSKLL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x26:
            /* EXTLL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x2B:
            /* INSLL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x30:
            /* ZAP */
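            /* ZAP clears the bytes selected by the mask, i.e. it is
               ZAPNOT of the complemented mask -- hence ~lit below.  */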
            if (islit) {
                gen_zapnoti(vc, va, ~lit);
            } else {
                gen_helper_zap(vc, va, load_gpr(ctx, rb));
            }
            break;
        case 0x31:
            /* ZAPNOT */
            if (islit) {
                gen_zapnoti(vc, va, lit);
            } else {
                gen_helper_zapnot(vc, va, load_gpr(ctx, rb));
            }
            break;
        case 0x32:
            /* MSKQL */
            gen_msk_l(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x34:
            /* SRL */
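            /* Alpha shift instructions use only the low six bits of the
               shift count, so the register count is masked with 0x3f.  */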
            if (islit) {
                tcg_gen_shri_i64(vc, va, lit & 0x3f);
            } else {
                tmp = tcg_temp_new();
                vb = load_gpr(ctx, rb);
                tcg_gen_andi_i64(tmp, vb, 0x3f);
                tcg_gen_shr_i64(vc, va, tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x36:
            /* EXTQL */
            gen_ext_l(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x39:
            /* SLL */
            if (islit) {
                tcg_gen_shli_i64(vc, va, lit & 0x3f);
            } else {
                tmp = tcg_temp_new();
                vb = load_gpr(ctx, rb);
                tcg_gen_andi_i64(tmp, vb, 0x3f);
                tcg_gen_shl_i64(vc, va, tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x3B:
            /* INSQL */
            gen_ins_l(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x3C:
            /* SRA */
            if (islit) {
                tcg_gen_sari_i64(vc, va, lit & 0x3f);
            } else {
                tmp = tcg_temp_new();
                vb = load_gpr(ctx, rb);
                tcg_gen_andi_i64(tmp, vb, 0x3f);
                tcg_gen_sar_i64(vc, va, tmp);
                tcg_temp_free(tmp);
            }
            break;
        case 0x52:
            /* MSKWH */
            gen_msk_h(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x57:
            /* INSWH */
            gen_ins_h(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x5A:
            /* EXTWH */
            gen_ext_h(ctx, vc, va, rb, islit, lit, 0x03);
            break;
        case 0x62:
            /* MSKLH */
            gen_msk_h(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x67:
            /* INSLH */
            gen_ins_h(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x6A:
            /* EXTLH */
            gen_ext_h(ctx, vc, va, rb, islit, lit, 0x0f);
            break;
        case 0x72:
            /* MSKQH */
            gen_msk_h(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x77:
            /* INSQH */
            gen_ins_h(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        case 0x7A:
            /* EXTQH */
            gen_ext_h(ctx, vc, va, rb, islit, lit, 0xff);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x13:
        vc = dest_gpr(ctx, rc);
        vb = load_gpr_lit(ctx, rb, lit, islit);
        va = load_gpr(ctx, ra);
        switch (fn7) {
        case 0x00:
            /* MULL */
            tcg_gen_mul_i64(vc, va, vb);
            tcg_gen_ext32s_i64(vc, vc);
            break;
        case 0x20:
            /* MULQ */
            tcg_gen_mul_i64(vc, va, vb);
            break;
        case 0x30:
            /* UMULH */
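            /* mulu2 produces the full 128-bit unsigned product: the low
               half lands in the scratch temp and is discarded, the high
               half in vc.  */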
            tmp = tcg_temp_new();
            tcg_gen_mulu2_i64(tmp, vc, va, vb);
            tcg_temp_free(tmp);
            break;
        case 0x40:
            /* MULL/V */
            tmp = tcg_temp_new();
            tcg_gen_ext32s_i64(tmp, va);
            tcg_gen_ext32s_i64(vc, vb);
            tcg_gen_mul_i64(tmp, tmp, vc);
            tcg_gen_ext32s_i64(vc, tmp);
            gen_helper_check_overflow(cpu_env, vc, tmp);
            tcg_temp_free(tmp);
            break;
        case 0x60:
            /* MULQ/V */
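            /* The signed 128-bit product overflows 64 bits iff its high
               half is not the sign-extension of the low half.  */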
            tmp = tcg_temp_new();
            tmp2 = tcg_temp_new();
            tcg_gen_muls2_i64(vc, tmp, va, vb);
            tcg_gen_sari_i64(tmp2, vc, 63);
            gen_helper_check_overflow(cpu_env, tmp, tmp2);
            tcg_temp_free(tmp);
            tcg_temp_free(tmp2);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x14:
        REQUIRE_TB_FLAG(TB_FLAGS_AMASK_FIX);
        vc = dest_fpr(ctx, rc);
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x04:
            /* ITOFS */
            REQUIRE_REG_31(rb);
            t32 = tcg_temp_new_i32();
            va = load_gpr(ctx, ra);
            tcg_gen_extrl_i64_i32(t32, va);
            gen_helper_memory_to_s(vc, t32);
            tcg_temp_free_i32(t32);
            break;
        case 0x0A:
            /* SQRTF */
            REQUIRE_REG_31(ra);
            vb = load_fpr(ctx, rb);
            gen_helper_sqrtf(vc, cpu_env, vb);
            break;
        case 0x0B:
            /* SQRTS */
            REQUIRE_REG_31(ra);
            gen_sqrts(ctx, rb, rc, fn11);
            break;
        case 0x14:
            /* ITOFF */
            REQUIRE_REG_31(rb);
            t32 = tcg_temp_new_i32();
            va = load_gpr(ctx, ra);
            tcg_gen_extrl_i64_i32(t32, va);
            gen_helper_memory_to_f(vc, t32);
            tcg_temp_free_i32(t32);
            break;
        case 0x24:
            /* ITOFT */
            REQUIRE_REG_31(rb);
            va = load_gpr(ctx, ra);
            tcg_gen_mov_i64(vc, va);
            break;
        case 0x2A:
            /* SQRTG */
            REQUIRE_REG_31(ra);
            vb = load_fpr(ctx, rb);
            gen_helper_sqrtg(vc, cpu_env, vb);
            break;
        case 0x2B:
            /* SQRTT */
            REQUIRE_REG_31(ra);
            gen_sqrtt(ctx, rb, rc, fn11);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x15:
        /* VAX floating point */
        /* XXX: rounding mode and trap are ignored (!) */
        vc = dest_fpr(ctx, rc);
        vb = load_fpr(ctx, rb);
        va = load_fpr(ctx, ra);
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x00:
            /* ADDF */
            gen_helper_addf(vc, cpu_env, va, vb);
            break;
        case 0x01:
            /* SUBF */
            gen_helper_subf(vc, cpu_env, va, vb);
            break;
        case 0x02:
            /* MULF */
            gen_helper_mulf(vc, cpu_env, va, vb);
            break;
        case 0x03:
            /* DIVF */
            gen_helper_divf(vc, cpu_env, va, vb);
            break;
        case 0x1E:
            /* CVTDG -- TODO */
            REQUIRE_REG_31(ra);
            goto invalid_opc;
        case 0x20:
            /* ADDG */
            gen_helper_addg(vc, cpu_env, va, vb);
            break;
        case 0x21:
            /* SUBG */
            gen_helper_subg(vc, cpu_env, va, vb);
            break;
        case 0x22:
            /* MULG */
            gen_helper_mulg(vc, cpu_env, va, vb);
            break;
        case 0x23:
            /* DIVG */
            gen_helper_divg(vc, cpu_env, va, vb);
            break;
        case 0x25:
            /* CMPGEQ */
            gen_helper_cmpgeq(vc, cpu_env, va, vb);
            break;
        case 0x26:
            /* CMPGLT */
            gen_helper_cmpglt(vc, cpu_env, va, vb);
            break;
        case 0x27:
            /* CMPGLE */
            gen_helper_cmpgle(vc, cpu_env, va, vb);
            break;
        case 0x2C:
            /* CVTGF */
            REQUIRE_REG_31(ra);
            gen_helper_cvtgf(vc, cpu_env, vb);
            break;
        case 0x2D:
            /* CVTGD -- TODO */
            REQUIRE_REG_31(ra);
            goto invalid_opc;
        case 0x2F:
            /* CVTGQ */
            REQUIRE_REG_31(ra);
            gen_helper_cvtgq(vc, cpu_env, vb);
            break;
        case 0x3C:
            /* CVTQF */
            REQUIRE_REG_31(ra);
            gen_helper_cvtqf(vc, cpu_env, vb);
            break;
        case 0x3E:
            /* CVTQG */
            REQUIRE_REG_31(ra);
            gen_helper_cvtqg(vc, cpu_env, vb);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x16:
        /* IEEE floating-point */
        switch (fpfn) { /* fn11 & 0x3F */
        case 0x00:
            /* ADDS */
            gen_adds(ctx, ra, rb, rc, fn11);
            break;
        case 0x01:
            /* SUBS */
            gen_subs(ctx, ra, rb, rc, fn11);
            break;
        case 0x02:
            /* MULS */
            gen_muls(ctx, ra, rb, rc, fn11);
            break;
        case 0x03:
            /* DIVS */
            gen_divs(ctx, ra, rb, rc, fn11);
            break;
        case 0x20:
            /* ADDT */
            gen_addt(ctx, ra, rb, rc, fn11);
            break;
        case 0x21:
            /* SUBT */
            gen_subt(ctx, ra, rb, rc, fn11);
            break;
        case 0x22:
            /* MULT */
            gen_mult(ctx, ra, rb, rc, fn11);
            break;
        case 0x23:
            /* DIVT */
            gen_divt(ctx, ra, rb, rc, fn11);
            break;
        case 0x24:
            /* CMPTUN */
            gen_cmptun(ctx, ra, rb, rc, fn11);
            break;
        case 0x25:
            /* CMPTEQ */
            gen_cmpteq(ctx, ra, rb, rc, fn11);
            break;
        case 0x26:
            /* CMPTLT */
            gen_cmptlt(ctx, ra, rb, rc, fn11);
            break;
        case 0x27:
            /* CMPTLE */
            gen_cmptle(ctx, ra, rb, rc, fn11);
            break;
        case 0x2C:
            REQUIRE_REG_31(ra);
            if (fn11 == 0x2AC || fn11 == 0x6AC) {
                /* CVTST */
                gen_cvtst(ctx, rb, rc, fn11);
            } else {
                /* CVTTS */
                gen_cvtts(ctx, rb, rc, fn11);
            }
            break;
        case 0x2F:
            /* CVTTQ */
            REQUIRE_REG_31(ra);
            gen_cvttq(ctx, rb, rc, fn11);
            break;
        case 0x3C:
            /* CVTQS */
            REQUIRE_REG_31(ra);
            gen_cvtqs(ctx, rb, rc, fn11);
            break;
        case 0x3E:
            /* CVTQT */
            REQUIRE_REG_31(ra);
            gen_cvtqt(ctx, rb, rc, fn11);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x17:
        switch (fn11) {
        case 0x010:
            /* CVTLQ */
            REQUIRE_REG_31(ra);
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            gen_cvtlq(vc, vb);
            break;
        case 0x020:
            /* CPYS */
            if (rc == 31) {
                /* Special case CPYS as FNOP. */
            } else {
                vc = dest_fpr(ctx, rc);
                va = load_fpr(ctx, ra);
                if (ra == rb) {
                    /* Special case CPYS as FMOV. */
                    tcg_gen_mov_i64(vc, va);
                } else {
                    vb = load_fpr(ctx, rb);
                    gen_cpy_mask(vc, va, vb, 0, 0x8000000000000000ULL);
                }
            }
            break;
        case 0x021:
            /* CPYSN */
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            va = load_fpr(ctx, ra);
            gen_cpy_mask(vc, va, vb, 1, 0x8000000000000000ULL);
            break;
        case 0x022:
            /* CPYSE */
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            va = load_fpr(ctx, ra);
            gen_cpy_mask(vc, va, vb, 0, 0xFFF0000000000000ULL);
            break;
        case 0x024:
            /* MT_FPCR */
            va = load_fpr(ctx, ra);
            gen_helper_store_fpcr(cpu_env, va);
            if (ctx->tb_rm == QUAL_RM_D) {
                /* Re-do the copy of the rounding mode to fp_status
                   the next time we use dynamic rounding. */
                ctx->tb_rm = -1;
            }
            break;
        case 0x025:
            /* MF_FPCR */
            va = dest_fpr(ctx, ra);
            gen_helper_load_fpcr(va, cpu_env);
            break;
        case 0x02A:
            /* FCMOVEQ */
            gen_fcmov(ctx, TCG_COND_EQ, ra, rb, rc);
            break;
        case 0x02B:
            /* FCMOVNE */
            gen_fcmov(ctx, TCG_COND_NE, ra, rb, rc);
            break;
        case 0x02C:
            /* FCMOVLT */
            gen_fcmov(ctx, TCG_COND_LT, ra, rb, rc);
            break;
        case 0x02D:
            /* FCMOVGE */
            gen_fcmov(ctx, TCG_COND_GE, ra, rb, rc);
            break;
        case 0x02E:
            /* FCMOVLE */
            gen_fcmov(ctx, TCG_COND_LE, ra, rb, rc);
            break;
        case 0x02F:
            /* FCMOVGT */
            gen_fcmov(ctx, TCG_COND_GT, ra, rb, rc);
            break;
        case 0x030: /* CVTQL */
        case 0x130: /* CVTQL/V */
        case 0x530: /* CVTQL/SV */
            REQUIRE_REG_31(ra);
            vc = dest_fpr(ctx, rc);
            vb = load_fpr(ctx, rb);
            gen_helper_cvtql(vc, cpu_env, vb);
            gen_fp_exc_raise(rc, fn11);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x18:
        switch ((uint16_t)disp16) {
        case 0x0000:
            /* TRAPB */
            /* No-op. */
            break;
        case 0x0400:
            /* EXCB */
            /* No-op. */
            break;
        case 0x4000:
            /* MB */
            /* No-op */
            break;
        case 0x4400:
            /* WMB */
            /* No-op */
            break;
        case 0x8000:
            /* FETCH */
            /* No-op */
            break;
        case 0xA000:
            /* FETCH_M */
            /* No-op */
            break;
        case 0xC000:
            /* RPCC */
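            /* With icount enabled the cycle-counter read is an I/O-like
               operation, so bracket it with gen_io_start/gen_io_end and
               end the TB afterwards (EXIT_PC_STALE).  */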
            va = dest_gpr(ctx, ra);
            if (ctx->tb->cflags & CF_USE_ICOUNT) {
                gen_io_start();
                gen_helper_load_pcc(va, cpu_env);
                gen_io_end();
                ret = EXIT_PC_STALE;
            } else {
                gen_helper_load_pcc(va, cpu_env);
            }
            break;
        case 0xE000:
            /* RC */
            gen_rx(ctx, ra, 0);
            break;
        case 0xE800:
            /* ECB */
            break;
        case 0xF000:
            /* RS */
            gen_rx(ctx, ra, 1);
            break;
        case 0xF800:
            /* WH64 */
            /* No-op */
            break;
        case 0xFC00:
            /* WH64EN */
            /* No-op */
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x19:
        /* HW_MFPR (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
        va = dest_gpr(ctx, ra);
        ret = gen_mfpr(ctx, va, insn & 0xffff);
        break;
#else
        goto invalid_opc;
#endif
    case 0x1A:
        /* JMP, JSR, RET, JSR_COROUTINE.  These only differ by the branch
           prediction stack action, which of course we don't implement. */
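        /* The target is Rb with the low two bits cleared; Ra, unless it
           is R31, receives the address of the following insn.  */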
        vb = load_gpr(ctx, rb);
        tcg_gen_andi_i64(cpu_pc, vb, ~3);
        if (ra != 31) {
            tcg_gen_movi_i64(ctx->ir[ra], ctx->pc);
        }
        ret = EXIT_PC_UPDATED;
        break;
    case 0x1B:
        /* HW_LD (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
        {
            TCGv addr = tcg_temp_new();
            vb = load_gpr(ctx, rb);
            va = dest_gpr(ctx, ra);
            tcg_gen_addi_i64(addr, vb, disp12);
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access (hw_ldl/p) */
                gen_helper_ldl_phys(va, cpu_env, addr);
                break;
            case 0x1:
                /* Quadword physical access (hw_ldq/p) */
                gen_helper_ldq_phys(va, cpu_env, addr);
                break;
            case 0x2:
                /* Longword physical access with lock (hw_ldl_l/p) */
                gen_helper_ldl_l_phys(va, cpu_env, addr);
                break;
            case 0x3:
                /* Quadword physical access with lock (hw_ldq_l/p) */
                gen_helper_ldq_l_phys(va, cpu_env, addr);
                break;
            case 0x4:
                /* Longword virtual PTE fetch (hw_ldl/v) */
                goto invalid_opc;
            case 0x5:
                /* Quadword virtual PTE fetch (hw_ldq/v) */
                goto invalid_opc;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Longword virtual access (hw_ldl) */
                goto invalid_opc;
            case 0x9:
                /* Quadword virtual access (hw_ldq) */
                goto invalid_opc;
            case 0xA:
                /* Longword virtual access with protection check (hw_ldl/w) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LESL);
                break;
            case 0xB:
                /* Quadword virtual access with protection check (hw_ldq/w) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_KERNEL_IDX, MO_LEQ);
                break;
            case 0xC:
                /* Longword virtual access with alt access mode (hw_ldl/a) */
                goto invalid_opc;
            case 0xD:
                /* Quadword virtual access with alt access mode (hw_ldq/a) */
                goto invalid_opc;
            case 0xE:
                /* Longword virtual access with alternate access mode and
                   protection checks (hw_ldl/wa) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LESL);
                break;
            case 0xF:
                /* Quadword virtual access with alternate access mode and
                   protection checks (hw_ldq/wa) */
                tcg_gen_qemu_ld_i64(va, addr, MMU_USER_IDX, MO_LEQ);
                break;
            }
            tcg_temp_free(addr);
            break;
        }
#else
        goto invalid_opc;
#endif
    case 0x1C:
        vc = dest_gpr(ctx, rc);
        if (fn7 == 0x70) {
            /* FTOIT */
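            /* FTOIT is a raw 64-bit copy from FPR to GPR: T-format
               values occupy the register bits unchanged.  */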
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_FIX);
            REQUIRE_REG_31(rb);
            va = load_fpr(ctx, ra);
            tcg_gen_mov_i64(vc, va);
            break;
        } else if (fn7 == 0x78) {
            /* FTOIS */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_FIX);
            REQUIRE_REG_31(rb);
            t32 = tcg_temp_new_i32();
            va = load_fpr(ctx, ra);
            gen_helper_s_to_memory(t32, va);
            tcg_gen_ext_i32_i64(vc, t32);
            tcg_temp_free_i32(t32);
            break;
        }
        vb = load_gpr_lit(ctx, rb, lit, islit);
        switch (fn7) {
        case 0x00:
            /* SEXTB */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
            REQUIRE_REG_31(ra);
            tcg_gen_ext8s_i64(vc, vb);
            break;
        case 0x01:
            /* SEXTW */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_BWX);
            REQUIRE_REG_31(ra);
            tcg_gen_ext16s_i64(vc, vb);
            break;
        case 0x30:
            /* CTPOP */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_CIX);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_ctpop(vc, vb);
            break;
        case 0x31:
            /* PERR */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            REQUIRE_NO_LIT;
            va = load_gpr(ctx, ra);
            gen_helper_perr(vc, va, vb);
            break;
        case 0x32:
            /* CTLZ */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_CIX);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_ctlz(vc, vb);
            break;
        case 0x33:
            /* CTTZ */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_CIX);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_cttz(vc, vb);
            break;
        case 0x34:
            /* UNPKBW */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_unpkbw(vc, vb);
            break;
        case 0x35:
            /* UNPKBL */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_unpkbl(vc, vb);
            break;
        case 0x36:
            /* PKWB */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_pkwb(vc, vb);
            break;
        case 0x37:
            /* PKLB */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            REQUIRE_REG_31(ra);
            REQUIRE_NO_LIT;
            gen_helper_pklb(vc, vb);
            break;
        case 0x38:
            /* MINSB8 */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minsb8(vc, va, vb);
            break;
        case 0x39:
            /* MINSW4 */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minsw4(vc, va, vb);
            break;
        case 0x3A:
            /* MINUB8 */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minub8(vc, va, vb);
            break;
        case 0x3B:
            /* MINUW4 */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            va = load_gpr(ctx, ra);
            gen_helper_minuw4(vc, va, vb);
            break;
        case 0x3C:
            /* MAXUB8 */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxub8(vc, va, vb);
            break;
        case 0x3D:
            /* MAXUW4 */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxuw4(vc, va, vb);
            break;
        case 0x3E:
            /* MAXSB8 */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxsb8(vc, va, vb);
            break;
        case 0x3F:
            /* MAXSW4 */
            REQUIRE_TB_FLAG(TB_FLAGS_AMASK_MVI);
            va = load_gpr(ctx, ra);
            gen_helper_maxsw4(vc, va, vb);
            break;
        default:
            goto invalid_opc;
        }
        break;
    case 0x1D:
        /* HW_MTPR (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
        vb = load_gpr(ctx, rb);
        ret = gen_mtpr(ctx, vb, insn & 0xffff);
        break;
#else
        goto invalid_opc;
#endif
    case 0x1E:
        /* HW_RET (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
        if (rb == 31) {
            /* Pre-EV6 CPUs interpreted this as HW_REI, loading the return
               address from EXC_ADDR.  This turns out to be useful for our
               emulation PALcode, so continue to accept it. */
            ctx->lit = vb = tcg_temp_new();
            tcg_gen_ld_i64(vb, cpu_env, offsetof(CPUAlphaState, exc_addr));
        } else {
            vb = load_gpr(ctx, rb);
        }
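        /* Returning from PALcode: clear the interrupt flag, drop any
           lock-flag reservation, let bit 0 of the target select PAL
           mode, and jump to the target with the low bits cleared.  */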
        tmp = tcg_temp_new();
        tcg_gen_movi_i64(tmp, 0);
        tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, intr_flag));
        tcg_gen_movi_i64(cpu_lock_addr, -1);
        tcg_gen_andi_i64(tmp, vb, 1);
        tcg_gen_st8_i64(tmp, cpu_env, offsetof(CPUAlphaState, pal_mode));
        tcg_gen_andi_i64(cpu_pc, vb, ~3);
        ret = EXIT_PC_UPDATED;
        break;
#else
        goto invalid_opc;
#endif
    case 0x1F:
        /* HW_ST (PALcode) */
#ifndef CONFIG_USER_ONLY
        REQUIRE_TB_FLAG(TB_FLAGS_PAL_MODE);
        {
            TCGv addr = tcg_temp_new();
            va = load_gpr(ctx, ra);
            vb = load_gpr(ctx, rb);
            tcg_gen_addi_i64(addr, vb, disp12);
            switch ((insn >> 12) & 0xF) {
            case 0x0:
                /* Longword physical access */
                gen_helper_stl_phys(cpu_env, addr, va);
                break;
            case 0x1:
                /* Quadword physical access */
                gen_helper_stq_phys(cpu_env, addr, va);
                break;
            case 0x2:
                /* Longword physical access with lock */
                gen_helper_stl_c_phys(dest_gpr(ctx, ra), cpu_env, addr, va);
                break;
            case 0x3:
                /* Quadword physical access with lock */
                gen_helper_stq_c_phys(dest_gpr(ctx, ra), cpu_env, addr, va);
                break;
            case 0x4:
                /* Longword virtual access */
                goto invalid_opc;
            case 0x5:
                /* Quadword virtual access */
                goto invalid_opc;
            case 0x6:
                /* Invalid */
                goto invalid_opc;
            case 0x7:
                /* Invalid */
                goto invalid_opc;
            case 0x8:
                /* Invalid */
                goto invalid_opc;
            case 0x9:
                /* Invalid */
                goto invalid_opc;
            case 0xA:
                /* Invalid */
                goto invalid_opc;
            case 0xB:
                /* Invalid */
                goto invalid_opc;
            case 0xC:
                /* Longword virtual access with alternate access mode */
                goto invalid_opc;
            case 0xD:
                /* Quadword virtual access with alternate access mode */
                goto invalid_opc;
            case 0xE:
                /* Invalid */
                goto invalid_opc;
            case 0xF:
                /* Invalid */
                goto invalid_opc;
            }
            tcg_temp_free(addr);
            break;
        }
#else
        goto invalid_opc;
#endif
    case 0x20:
        /* LDF */
        gen_load_mem(ctx, &gen_qemu_ldf, ra, rb, disp16, 1, 0);
        break;
    case 0x21:
        /* LDG */
        gen_load_mem(ctx, &gen_qemu_ldg, ra, rb, disp16, 1, 0);
        break;
    case 0x22:
        /* LDS */
        gen_load_mem(ctx, &gen_qemu_lds, ra, rb, disp16, 1, 0);
        break;
    case 0x23:
        /* LDT */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 1, 0);
        break;
    case 0x24:
        /* STF */
        gen_store_mem(ctx, &gen_qemu_stf, ra, rb, disp16, 1, 0);
        break;
    case 0x25:
        /* STG */
        gen_store_mem(ctx, &gen_qemu_stg, ra, rb, disp16, 1, 0);
        break;
    case 0x26:
        /* STS */
        gen_store_mem(ctx, &gen_qemu_sts, ra, rb, disp16, 1, 0);
        break;
    case 0x27:
        /* STT */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 1, 0);
        break;
    case 0x28:
        /* LDL */
        gen_load_mem(ctx, &tcg_gen_qemu_ld32s, ra, rb, disp16, 0, 0);
        break;
    case 0x29:
        /* LDQ */
        gen_load_mem(ctx, &tcg_gen_qemu_ld64, ra, rb, disp16, 0, 0);
        break;
    case 0x2A:
        /* LDL_L */
        gen_load_mem(ctx, &gen_qemu_ldl_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2B:
        /* LDQ_L */
        gen_load_mem(ctx, &gen_qemu_ldq_l, ra, rb, disp16, 0, 0);
        break;
    case 0x2C:
        /* STL */
        gen_store_mem(ctx, &tcg_gen_qemu_st32, ra, rb, disp16, 0, 0);
        break;
    case 0x2D:
        /* STQ */
        gen_store_mem(ctx, &tcg_gen_qemu_st64, ra, rb, disp16, 0, 0);
        break;
    case 0x2E:
        /* STL_C */
        ret = gen_store_conditional(ctx, ra, rb, disp16, 0);
        break;
    case 0x2F:
        /* STQ_C */
        ret = gen_store_conditional(ctx, ra, rb, disp16, 1);
        break;
    case 0x30:
        /* BR */
        ret = gen_bdirect(ctx, ra, disp21);
        break;
    case 0x31: /* FBEQ */
        ret = gen_fbcond(ctx, TCG_COND_EQ, ra, disp21);
        break;
    case 0x32: /* FBLT */
        ret = gen_fbcond(ctx, TCG_COND_LT, ra, disp21);
        break;
    case 0x33: /* FBLE */
        ret = gen_fbcond(ctx, TCG_COND_LE, ra, disp21);
        break;
    case 0x34:
        /* BSR */
        ret = gen_bdirect(ctx, ra, disp21);
        break;
    case 0x35: /* FBNE */
        ret = gen_fbcond(ctx, TCG_COND_NE, ra, disp21);
        break;
    case 0x36: /* FBGE */
        ret = gen_fbcond(ctx, TCG_COND_GE, ra, disp21);
        break;
    case 0x37: /* FBGT */
        ret = gen_fbcond(ctx, TCG_COND_GT, ra, disp21);
        break;
    case 0x38:
        /* BLBC */
        ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 1);
        break;
    case 0x39:
        /* BEQ */
        ret = gen_bcond(ctx, TCG_COND_EQ, ra, disp21, 0);
        break;
    case 0x3A:
        /* BLT */
        ret = gen_bcond(ctx, TCG_COND_LT, ra, disp21, 0);
        break;
    case 0x3B:
        /* BLE */
        ret = gen_bcond(ctx, TCG_COND_LE, ra, disp21, 0);
        break;
    case 0x3C:
        /* BLBS */
        ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 1);
        break;
    case 0x3D:
        /* BNE */
        ret = gen_bcond(ctx, TCG_COND_NE, ra, disp21, 0);
        break;
    case 0x3E:
        /* BGE */
        ret = gen_bcond(ctx, TCG_COND_GE, ra, disp21, 0);
        break;
    case 0x3F:
        /* BGT */
        ret = gen_bcond(ctx, TCG_COND_GT, ra, disp21, 0);
        break;
    invalid_opc:
        ret = gen_invalid(ctx);
        break;
    }

    return ret;
}

void gen_intermediate_code(CPUAlphaState *env, struct TranslationBlock *tb)
{
    AlphaCPU *cpu = alpha_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    DisasContext ctx, *ctxp = &ctx;
    target_ulong pc_start;
    target_ulong pc_mask;
    uint32_t insn;
    ExitStatus ret;
    int num_insns;
    int max_insns;

    pc_start = tb->pc;

    ctx.tb = tb;
    ctx.pc = pc_start;
    ctx.mem_idx = cpu_mmu_index(env, false);
    ctx.implver = env->implver;
    ctx.singlestep_enabled = cs->singlestep_enabled;

#ifdef CONFIG_USER_ONLY
    ctx.ir = cpu_std_ir;
#else
    ctx.palbr = env->palbr;
    ctx.ir = (tb->flags & TB_FLAGS_PAL_MODE ? cpu_pal_ir : cpu_std_ir);
#endif
    /* ??? Every TB begins with unset rounding mode, to be initialized on
       the first fp insn of the TB.  Alternately we could define a proper
       default for every TB (e.g. QUAL_RM_N or QUAL_RM_D) and make sure
       to reset the FP_STATUS to that default at the end of any TB that
       changes the default.  We could even (gasp) dynamically figure out
       what default would be most efficient given the running program. */
    ctx.tb_rm = -1;
    /* Similarly for flush-to-zero. */
    ctx.tb_ftz = -1;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    }
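    /* A TB inside a superpage may cross ordinary page boundaries, so the
       stop test below masks with the 41-bit superpage size instead of
       the page size.  */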
    if (in_superpage(&ctx, pc_start)) {
        pc_mask = (1ULL << 41) - 1;
    } else {
        pc_mask = ~TARGET_PAGE_MASK;
    }

    gen_tb_start(tb);
    do {
        tcg_gen_insn_start(ctx.pc);
        num_insns++;

        if (unlikely(cpu_breakpoint_test(cs, ctx.pc, BP_ANY))) {
            gen_excp(&ctx, EXCP_DEBUG, 0);
            break;
        }
        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
            gen_io_start();
        }
        insn = cpu_ldl_code(env, ctx.pc);
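        /* The zero, sink and lit temporaries are allocated lazily by the
           operand helpers; mark them unused here and free whichever ones
           the instruction actually created once it has been translated.  */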
        TCGV_UNUSED_I64(ctx.zero);
        TCGV_UNUSED_I64(ctx.sink);
        TCGV_UNUSED_I64(ctx.lit);

        ctx.pc += 4;
        ret = translate_one(ctxp, insn);

        if (!TCGV_IS_UNUSED_I64(ctx.sink)) {
            tcg_gen_discard_i64(ctx.sink);
            tcg_temp_free(ctx.sink);
        }
        if (!TCGV_IS_UNUSED_I64(ctx.zero)) {
            tcg_temp_free(ctx.zero);
        }
        if (!TCGV_IS_UNUSED_I64(ctx.lit)) {
            tcg_temp_free(ctx.lit);
        }

        /* If we reach a page boundary, are single stepping,
           or exhaust instruction count, stop generation. */
        if (ret == NO_EXIT
            && ((ctx.pc & pc_mask) == 0
                || tcg_op_buf_full()
                || num_insns >= max_insns
                || singlestep
                || ctx.singlestep_enabled)) {
            ret = EXIT_PC_STALE;
        }
    } while (ret == NO_EXIT);

    if (tb->cflags & CF_LAST_IO) {
        gen_io_end();
    }

    switch (ret) {
    case EXIT_GOTO_TB:
    case EXIT_NORETURN:
        break;
    case EXIT_PC_STALE:
        tcg_gen_movi_i64(cpu_pc, ctx.pc);
        /* FALLTHRU */
    case EXIT_PC_UPDATED:
        if (ctx.singlestep_enabled) {
            gen_excp_1(EXCP_DEBUG, 0);
        } else {
            tcg_gen_exit_tb(0);
        }
        break;
    default:
        abort();
    }

    gen_tb_end(tb, num_insns);

    tb->size = ctx.pc - pc_start;
    tb->icount = num_insns;

#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, ctx.pc - pc_start, 1);
        qemu_log("\n");
    }
#endif
}

void restore_state_to_opc(CPUAlphaState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->pc = data[0];
}