translate.c 80 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090
  1. /*
  2. * m68k translation
  3. *
  4. * Copyright (c) 2005-2007 CodeSourcery
  5. * Written by Paul Brook
  6. *
  7. * This library is free software; you can redistribute it and/or
  8. * modify it under the terms of the GNU Lesser General Public
  9. * License as published by the Free Software Foundation; either
  10. * version 2 of the License, or (at your option) any later version.
  11. *
  12. * This library is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  15. * General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU Lesser General Public
  18. * License along with this library; if not, see <http://www.gnu.org/licenses/>.
  19. */
  20. #include "cpu.h"
  21. #include "disas/disas.h"
  22. #include "tcg-op.h"
  23. #include "qemu/log.h"
  24. #include "exec/cpu_ldst.h"
  25. #include "exec/helper-proto.h"
  26. #include "exec/helper-gen.h"
  27. #include "trace-tcg.h"
//#define DEBUG_DISPATCH 1

/* Fake floating point.  FP values are passed through as raw i64 bit
   patterns; real FP arithmetic happens in helpers. */
#define tcg_gen_mov_f64 tcg_gen_mov_i64
#define tcg_gen_qemu_ldf64 tcg_gen_qemu_ld64
#define tcg_gen_qemu_stf64 tcg_gen_qemu_st64

/* Declare one static TCG global per entry in qregs.def.  The globals
   themselves are created in m68k_tcg_init(). */
#define DEFO32(name, offset) static TCGv QREG_##name;
#define DEFO64(name, offset) static TCGv_i64 QREG_##name;
#define DEFF64(name, offset) static TCGv_i64 QREG_##name;
#include "qregs.def"
#undef DEFO32
#undef DEFO64
#undef DEFF64

static TCGv_i32 cpu_halted;
static TCGv_i32 cpu_exception_index;

static TCGv_ptr cpu_env;

/* Backing store for the register names: 3 banks of 8 three-byte names
   ("D0".."F7") plus 4 five-byte names ("ACC0".."ACC3"). */
static char cpu_reg_names[3*8*3 + 5*4];
static TCGv cpu_dregs[8];       /* D0-D7 */
static TCGv cpu_aregs[8];       /* A0-A7 */
static TCGv_i64 cpu_fregs[8];   /* F0-F7 */
static TCGv_i64 cpu_macc[4];    /* EMAC accumulators */

/* Extract a register field from an instruction word. */
#define DREG(insn, pos) cpu_dregs[((insn) >> (pos)) & 7]
#define AREG(insn, pos) cpu_aregs[((insn) >> (pos)) & 7]
#define FREG(insn, pos) cpu_fregs[((insn) >> (pos)) & 7]
#define MACREG(acc) cpu_macc[acc]
#define QREG_SP cpu_aregs[7]

/* Sentinel returned by gen_lea/gen_ea for modes with no address. */
static TCGv NULL_QREG;
#define IS_NULL_QREG(t) (TCGV_EQUAL(t, NULL_QREG))
/* Used to distinguish stores from bad addressing modes. */
static TCGv store_dummy;

#include "exec/gen-icount.h"
/* Create the fixed TCG globals (CPU registers and internal translator
   state) backed by CPUM68KState fields.  Called once at startup. */
void m68k_tcg_init(void)
{
    char *p;
    int i;

    /* Instantiate the QREG_* globals declared from qregs.def. */
#define DEFO32(name, offset) QREG_##name = tcg_global_mem_new_i32(TCG_AREG0, offsetof(CPUM68KState, offset), #name);
#define DEFO64(name, offset) QREG_##name = tcg_global_mem_new_i64(TCG_AREG0, offsetof(CPUM68KState, offset), #name);
#define DEFF64(name, offset) DEFO64(name, offset)
#include "qregs.def"
#undef DEFO32
#undef DEFO64
#undef DEFF64

    /* halted and exception_index live in the CPUState that precedes
       env inside M68kCPU, hence the negative offset from env. */
    cpu_halted = tcg_global_mem_new_i32(TCG_AREG0,
                                        -offsetof(M68kCPU, env) +
                                        offsetof(CPUState, halted), "HALTED");
    cpu_exception_index = tcg_global_mem_new_i32(TCG_AREG0,
                                                 -offsetof(M68kCPU, env) +
                                                 offsetof(CPUState, exception_index),
                                                 "EXCEPTION");

    cpu_env = tcg_global_reg_new_ptr(TCG_AREG0, "env");

    /* Lay the register names out in cpu_reg_names; TCG keeps a pointer
       to each name, so they must stay live.  "Dn"/"An"/"Fn" take 3
       bytes each (including NUL), "ACCn" takes 5. */
    p = cpu_reg_names;
    for (i = 0; i < 8; i++) {
        sprintf(p, "D%d", i);
        cpu_dregs[i] = tcg_global_mem_new(TCG_AREG0,
                                          offsetof(CPUM68KState, dregs[i]), p);
        p += 3;
        sprintf(p, "A%d", i);
        cpu_aregs[i] = tcg_global_mem_new(TCG_AREG0,
                                          offsetof(CPUM68KState, aregs[i]), p);
        p += 3;
        sprintf(p, "F%d", i);
        cpu_fregs[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                              offsetof(CPUM68KState, fregs[i]), p);
        p += 3;
    }
    for (i = 0; i < 4; i++) {
        sprintf(p, "ACC%d", i);
        cpu_macc[i] = tcg_global_mem_new_i64(TCG_AREG0,
                                             offsetof(CPUM68KState, macc[i]), p);
        p += 5;
    }

    /* Sentinels at dummy negative offsets; never loaded or stored, only
       compared against with IS_NULL_QREG.
       NOTE(review): both are registered under the name "NULL" -- looks
       intentional (names are debug-only), but "DUMMY" would be clearer
       for store_dummy; confirm before changing. */
    NULL_QREG = tcg_global_mem_new(TCG_AREG0, -4, "NULL");
    store_dummy = tcg_global_mem_new(TCG_AREG0, -8, "NULL");
}
/* internal defines */

/* Per-translation decoder state, threaded through all disas_* and
   gen_* functions. */
typedef struct DisasContext {
    CPUM68KState *env;
    target_ulong insn_pc; /* Start of the current instruction. */
    target_ulong pc;      /* Fetch position; advanced as extension words
                             and immediates are consumed. */
    int is_jmp;           /* Translation-stop reason (DISAS_* value). */
    int cc_op;            /* Lazy condition-code state (CC_OP_*). */
    int user;             /* Nonzero for user-mode; see IS_USER(). */
    uint32_t fpcr;        /* FP control register shadow -- presumably a
                             cached copy of env state; not used in the
                             code visible here. */
    struct TranslationBlock *tb;
    int singlestep_enabled;
    TCGv_i64 mactmp;      /* Scratch i64 temp for MAC instructions. */
    int done_mac;         /* Nonzero once mactmp has been allocated. */
} DisasContext;
/* Additional is_jmp state beyond the generic DISAS_* codes. */
#define DISAS_JUMP_NEXT 4

/* MMU index for memory accesses: user-only builds always use user
   mode; softmmu builds consult the per-context flag. */
#if defined(CONFIG_USER_ONLY)
#define IS_USER(s) 1
#else
#define IS_USER(s) s->user
#endif

/* XXX: move that elsewhere */
/* ??? Fix exceptions. */
/* Set after emitting a memory op that may fault (exception tracking
   placeholder; gen_last_qop is always NULL here). */
static void *gen_throws_exception;
#define gen_last_qop NULL

/* Operand sizes.  NOTE(review): 3 is unassigned -- presumably reserved
   for a size not handled by this translator; confirm before reusing. */
#define OS_BYTE 0
#define OS_WORD 1
#define OS_LONG 2
#define OS_SINGLE 4
#define OS_DOUBLE 5

/* Signature shared by every instruction decoder (see DISAS_INSN). */
typedef void (*disas_proc)(CPUM68KState *env, DisasContext *s, uint16_t insn);
  131. #ifdef DEBUG_DISPATCH
  132. #define DISAS_INSN(name) \
  133. static void real_disas_##name(CPUM68KState *env, DisasContext *s, \
  134. uint16_t insn); \
  135. static void disas_##name(CPUM68KState *env, DisasContext *s, \
  136. uint16_t insn) \
  137. { \
  138. qemu_log("Dispatch " #name "\n"); \
  139. real_disas_##name(s, env, insn); \
  140. } \
  141. static void real_disas_##name(CPUM68KState *env, DisasContext *s, \
  142. uint16_t insn)
  143. #else
  144. #define DISAS_INSN(name) \
  145. static void disas_##name(CPUM68KState *env, DisasContext *s, \
  146. uint16_t insn)
  147. #endif
  148. /* Generate a load from the specified address. Narrow values are
  149. sign extended to full register width. */
  150. static inline TCGv gen_load(DisasContext * s, int opsize, TCGv addr, int sign)
  151. {
  152. TCGv tmp;
  153. int index = IS_USER(s);
  154. tmp = tcg_temp_new_i32();
  155. switch(opsize) {
  156. case OS_BYTE:
  157. if (sign)
  158. tcg_gen_qemu_ld8s(tmp, addr, index);
  159. else
  160. tcg_gen_qemu_ld8u(tmp, addr, index);
  161. break;
  162. case OS_WORD:
  163. if (sign)
  164. tcg_gen_qemu_ld16s(tmp, addr, index);
  165. else
  166. tcg_gen_qemu_ld16u(tmp, addr, index);
  167. break;
  168. case OS_LONG:
  169. case OS_SINGLE:
  170. tcg_gen_qemu_ld32u(tmp, addr, index);
  171. break;
  172. default:
  173. g_assert_not_reached();
  174. }
  175. gen_throws_exception = gen_last_qop;
  176. return tmp;
  177. }
  178. static inline TCGv_i64 gen_load64(DisasContext * s, TCGv addr)
  179. {
  180. TCGv_i64 tmp;
  181. int index = IS_USER(s);
  182. tmp = tcg_temp_new_i64();
  183. tcg_gen_qemu_ldf64(tmp, addr, index);
  184. gen_throws_exception = gen_last_qop;
  185. return tmp;
  186. }
  187. /* Generate a store. */
  188. static inline void gen_store(DisasContext *s, int opsize, TCGv addr, TCGv val)
  189. {
  190. int index = IS_USER(s);
  191. switch(opsize) {
  192. case OS_BYTE:
  193. tcg_gen_qemu_st8(val, addr, index);
  194. break;
  195. case OS_WORD:
  196. tcg_gen_qemu_st16(val, addr, index);
  197. break;
  198. case OS_LONG:
  199. case OS_SINGLE:
  200. tcg_gen_qemu_st32(val, addr, index);
  201. break;
  202. default:
  203. g_assert_not_reached();
  204. }
  205. gen_throws_exception = gen_last_qop;
  206. }
  207. static inline void gen_store64(DisasContext *s, TCGv addr, TCGv_i64 val)
  208. {
  209. int index = IS_USER(s);
  210. tcg_gen_qemu_stf64(val, addr, index);
  211. gen_throws_exception = gen_last_qop;
  212. }
/* Kind of access performed by gen_ldst/gen_ea. */
typedef enum {
    EA_STORE,   /* write VAL to the effective address */
    EA_LOADU,   /* zero-extending load */
    EA_LOADS    /* sign-extending load */
} ea_what;
  218. /* Generate an unsigned load if VAL is 0 a signed load if val is -1,
  219. otherwise generate a store. */
  220. static TCGv gen_ldst(DisasContext *s, int opsize, TCGv addr, TCGv val,
  221. ea_what what)
  222. {
  223. if (what == EA_STORE) {
  224. gen_store(s, opsize, addr, val);
  225. return store_dummy;
  226. } else {
  227. return gen_load(s, opsize, addr, what == EA_LOADS);
  228. }
  229. }
  230. /* Read a 32-bit immediate constant. */
  231. static inline uint32_t read_im32(CPUM68KState *env, DisasContext *s)
  232. {
  233. uint32_t im;
  234. im = ((uint32_t)cpu_lduw_code(env, s->pc)) << 16;
  235. s->pc += 2;
  236. im |= cpu_lduw_code(env, s->pc);
  237. s->pc += 2;
  238. return im;
  239. }
  240. /* Calculate and address index. */
  241. static TCGv gen_addr_index(uint16_t ext, TCGv tmp)
  242. {
  243. TCGv add;
  244. int scale;
  245. add = (ext & 0x8000) ? AREG(ext, 12) : DREG(ext, 12);
  246. if ((ext & 0x800) == 0) {
  247. tcg_gen_ext16s_i32(tmp, add);
  248. add = tmp;
  249. }
  250. scale = (ext >> 9) & 3;
  251. if (scale != 0) {
  252. tcg_gen_shli_i32(tmp, add, scale);
  253. add = tmp;
  254. }
  255. return add;
  256. }
/* Handle a base + index + displacement effective address.
   A NULL_QREG base means pc-relative.  Returns NULL_QREG when the
   encoded mode is not available on the current CPU feature set. */
static TCGv gen_lea_indexed(CPUM68KState *env, DisasContext *s, TCGv base)
{
    uint32_t offset;
    uint16_t ext;
    TCGv add;
    TCGv tmp;
    uint32_t bd, od;

    /* PC value used by pc-relative modes: address of the extension
       word itself. */
    offset = s->pc;
    ext = cpu_lduw_code(env, s->pc);
    s->pc += 2;
    /* Bit 11 clear means a word-sized index register, which requires
       the WORD_INDEX feature. */
    if ((ext & 0x800) == 0 && !m68k_feature(s->env, M68K_FEATURE_WORD_INDEX))
        return NULL_QREG;
    if (ext & 0x100) {
        /* full extension word format */
        if (!m68k_feature(s->env, M68K_FEATURE_EXT_FULL))
            return NULL_QREG;
        if ((ext & 0x30) > 0x10) {
            /* base displacement */
            if ((ext & 0x30) == 0x20) {
                /* 16-bit displacement, sign extended. */
                bd = (int16_t)cpu_lduw_code(env, s->pc);
                s->pc += 2;
            } else {
                /* 32-bit displacement. */
                bd = read_im32(env, s);
            }
        } else {
            bd = 0;
        }
        tmp = tcg_temp_new();
        if ((ext & 0x44) == 0) {
            /* pre-index */
            add = gen_addr_index(ext, tmp);
        } else {
            /* index suppressed (or post-indexed, handled below). */
            add = NULL_QREG;
        }
        if ((ext & 0x80) == 0) {
            /* base not suppressed */
            if (IS_NULL_QREG(base)) {
                /* pc-relative: fold the displacement into a constant. */
                base = tcg_const_i32(offset + bd);
                bd = 0;
            }
            if (!IS_NULL_QREG(add)) {
                tcg_gen_add_i32(tmp, add, base);
                add = tmp;
            } else {
                add = base;
            }
        }
        if (!IS_NULL_QREG(add)) {
            if (bd != 0) {
                tcg_gen_addi_i32(tmp, add, bd);
                add = tmp;
            }
        } else {
            /* Everything suppressed: address is just the displacement. */
            add = tcg_const_i32(bd);
        }
        if ((ext & 3) != 0) {
            /* memory indirect */
            base = gen_load(s, OS_LONG, add, 0);
            if ((ext & 0x44) == 4) {
                /* post-index: add the index after the indirection. */
                add = gen_addr_index(ext, tmp);
                tcg_gen_add_i32(tmp, add, base);
                add = tmp;
            } else {
                add = base;
            }
            if ((ext & 3) > 1) {
                /* outer displacement */
                if ((ext & 3) == 2) {
                    od = (int16_t)cpu_lduw_code(env, s->pc);
                    s->pc += 2;
                } else {
                    od = read_im32(env, s);
                }
            } else {
                od = 0;
            }
            if (od != 0) {
                tcg_gen_addi_i32(tmp, add, od);
                add = tmp;
            }
        }
    } else {
        /* brief extension word format: index register plus an 8-bit
           signed displacement in the low byte of ext. */
        tmp = tcg_temp_new();
        add = gen_addr_index(ext, tmp);
        if (!IS_NULL_QREG(base)) {
            tcg_gen_add_i32(tmp, add, base);
            if ((int8_t)ext)
                tcg_gen_addi_i32(tmp, tmp, (int8_t)ext);
        } else {
            /* pc-relative. */
            tcg_gen_addi_i32(tmp, add, offset + (int8_t)ext);
        }
        add = tmp;
    }
    return add;
}
  355. /* Update the CPU env CC_OP state. */
  356. static inline void gen_flush_cc_op(DisasContext *s)
  357. {
  358. if (s->cc_op != CC_OP_DYNAMIC)
  359. tcg_gen_movi_i32(QREG_CC_OP, s->cc_op);
  360. }
  361. /* Evaluate all the CC flags. */
  362. static inline void gen_flush_flags(DisasContext *s)
  363. {
  364. if (s->cc_op == CC_OP_FLAGS)
  365. return;
  366. gen_flush_cc_op(s);
  367. gen_helper_flush_flags(cpu_env, QREG_CC_OP);
  368. s->cc_op = CC_OP_FLAGS;
  369. }
  370. static void gen_logic_cc(DisasContext *s, TCGv val)
  371. {
  372. tcg_gen_mov_i32(QREG_CC_DEST, val);
  373. s->cc_op = CC_OP_LOGIC;
  374. }
  375. static void gen_update_cc_add(TCGv dest, TCGv src)
  376. {
  377. tcg_gen_mov_i32(QREG_CC_DEST, dest);
  378. tcg_gen_mov_i32(QREG_CC_SRC, src);
  379. }
  380. static inline int opsize_bytes(int opsize)
  381. {
  382. switch (opsize) {
  383. case OS_BYTE: return 1;
  384. case OS_WORD: return 2;
  385. case OS_LONG: return 4;
  386. case OS_SINGLE: return 4;
  387. case OS_DOUBLE: return 8;
  388. default:
  389. g_assert_not_reached();
  390. }
  391. }
  392. /* Assign value to a register. If the width is less than the register width
  393. only the low part of the register is set. */
  394. static void gen_partset_reg(int opsize, TCGv reg, TCGv val)
  395. {
  396. TCGv tmp;
  397. switch (opsize) {
  398. case OS_BYTE:
  399. tcg_gen_andi_i32(reg, reg, 0xffffff00);
  400. tmp = tcg_temp_new();
  401. tcg_gen_ext8u_i32(tmp, val);
  402. tcg_gen_or_i32(reg, reg, tmp);
  403. break;
  404. case OS_WORD:
  405. tcg_gen_andi_i32(reg, reg, 0xffff0000);
  406. tmp = tcg_temp_new();
  407. tcg_gen_ext16u_i32(tmp, val);
  408. tcg_gen_or_i32(reg, reg, tmp);
  409. break;
  410. case OS_LONG:
  411. case OS_SINGLE:
  412. tcg_gen_mov_i32(reg, val);
  413. break;
  414. default:
  415. g_assert_not_reached();
  416. }
  417. }
  418. /* Sign or zero extend a value. */
  419. static inline TCGv gen_extend(TCGv val, int opsize, int sign)
  420. {
  421. TCGv tmp;
  422. switch (opsize) {
  423. case OS_BYTE:
  424. tmp = tcg_temp_new();
  425. if (sign)
  426. tcg_gen_ext8s_i32(tmp, val);
  427. else
  428. tcg_gen_ext8u_i32(tmp, val);
  429. break;
  430. case OS_WORD:
  431. tmp = tcg_temp_new();
  432. if (sign)
  433. tcg_gen_ext16s_i32(tmp, val);
  434. else
  435. tcg_gen_ext16u_i32(tmp, val);
  436. break;
  437. case OS_LONG:
  438. case OS_SINGLE:
  439. tmp = val;
  440. break;
  441. default:
  442. g_assert_not_reached();
  443. }
  444. return tmp;
  445. }
/* Generate code for an "effective address".  Does not adjust the base
   register for autoincrement addressing modes.  Returns NULL_QREG for
   modes that have no address (register direct, immediate) or are not
   available. */
static TCGv gen_lea(CPUM68KState *env, DisasContext *s, uint16_t insn,
                    int opsize)
{
    TCGv reg;
    TCGv tmp;
    uint16_t ext;
    uint32_t offset;

    /* Mode field is bits 5:3 of the opcode. */
    switch ((insn >> 3) & 7) {
    case 0: /* Data register direct. */
    case 1: /* Address register direct. */
        return NULL_QREG;
    case 2: /* Indirect register */
    case 3: /* Indirect postincrement. */
        return AREG(insn, 0);
    case 4: /* Indirect predecrement. */
        /* Address is An minus the operand size; An itself is updated
           by the caller (gen_ea). */
        reg = AREG(insn, 0);
        tmp = tcg_temp_new();
        tcg_gen_subi_i32(tmp, reg, opsize_bytes(opsize));
        return tmp;
    case 5: /* Indirect displacement. */
        /* An plus a signed 16-bit extension word. */
        reg = AREG(insn, 0);
        tmp = tcg_temp_new();
        ext = cpu_lduw_code(env, s->pc);
        s->pc += 2;
        tcg_gen_addi_i32(tmp, reg, (int16_t)ext);
        return tmp;
    case 6: /* Indirect index + displacement. */
        reg = AREG(insn, 0);
        return gen_lea_indexed(env, s, reg);
    case 7: /* Other */
        switch (insn & 7) {
        case 0: /* Absolute short. */
            offset = cpu_ldsw_code(env, s->pc);
            s->pc += 2;
            return tcg_const_i32(offset);
        case 1: /* Absolute long. */
            offset = read_im32(env, s);
            return tcg_const_i32(offset);
        case 2: /* pc displacement */
            /* PC value is the address of the extension word. */
            offset = s->pc;
            offset += cpu_ldsw_code(env, s->pc);
            s->pc += 2;
            return tcg_const_i32(offset);
        case 3: /* pc index+displacement. */
            return gen_lea_indexed(env, s, NULL_QREG);
        case 4: /* Immediate. */
        default:
            return NULL_QREG;
        }
    }
    /* Should never happen. */
    return NULL_QREG;
}
  501. /* Helper function for gen_ea. Reuse the computed address between the
  502. for read/write operands. */
  503. static inline TCGv gen_ea_once(CPUM68KState *env, DisasContext *s,
  504. uint16_t insn, int opsize, TCGv val,
  505. TCGv *addrp, ea_what what)
  506. {
  507. TCGv tmp;
  508. if (addrp && what == EA_STORE) {
  509. tmp = *addrp;
  510. } else {
  511. tmp = gen_lea(env, s, insn, opsize);
  512. if (IS_NULL_QREG(tmp))
  513. return tmp;
  514. if (addrp)
  515. *addrp = tmp;
  516. }
  517. return gen_ldst(s, opsize, tmp, val, what);
  518. }
/* Generate code to load/store a value from/into an EA.  WHAT selects
   a store (EA_STORE), sign-extending load (EA_LOADS) or zero-extending
   load (EA_LOADU).  ADDRP is non-null for read-modify-write operands:
   the read records the computed address there and the store reuses it.
   Returns the loaded value, store_dummy for a store, or NULL_QREG for
   an invalid addressing mode. */
static TCGv gen_ea(CPUM68KState *env, DisasContext *s, uint16_t insn,
                   int opsize, TCGv val, TCGv *addrp, ea_what what)
{
    TCGv reg;
    TCGv result;
    uint32_t offset;

    switch ((insn >> 3) & 7) {
    case 0: /* Data register direct. */
        reg = DREG(insn, 0);
        if (what == EA_STORE) {
            /* Only the low OPSIZE bytes of the register are written. */
            gen_partset_reg(opsize, reg, val);
            return store_dummy;
        } else {
            return gen_extend(reg, opsize, what == EA_LOADS);
        }
    case 1: /* Address register direct. */
        reg = AREG(insn, 0);
        if (what == EA_STORE) {
            tcg_gen_mov_i32(reg, val);
            return store_dummy;
        } else {
            return gen_extend(reg, opsize, what == EA_LOADS);
        }
    case 2: /* Indirect register */
        reg = AREG(insn, 0);
        return gen_ldst(s, opsize, reg, val, what);
    case 3: /* Indirect postincrement. */
        reg = AREG(insn, 0);
        result = gen_ldst(s, opsize, reg, val, what);
        /* ??? This is not exception safe.  The instruction may still
           fault after this point. */
        /* Increment only once per instruction: on the store, or on the
           read of a plain (non read-modify-write) operand. */
        if (what == EA_STORE || !addrp)
            tcg_gen_addi_i32(reg, reg, opsize_bytes(opsize));
        return result;
    case 4: /* Indirect predecrement. */
        {
            TCGv tmp;
            if (addrp && what == EA_STORE) {
                tmp = *addrp;
            } else {
                tmp = gen_lea(env, s, insn, opsize);
                if (IS_NULL_QREG(tmp))
                    return tmp;
                if (addrp)
                    *addrp = tmp;
            }
            result = gen_ldst(s, opsize, tmp, val, what);
            /* ??? This is not exception safe.  The instruction may still
               fault after this point. */
            if (what == EA_STORE || !addrp) {
                /* Commit the decremented address back to the register. */
                reg = AREG(insn, 0);
                tcg_gen_mov_i32(reg, tmp);
            }
        }
        return result;
    case 5: /* Indirect displacement. */
    case 6: /* Indirect index + displacement. */
        return gen_ea_once(env, s, insn, opsize, val, addrp, what);
    case 7: /* Other */
        switch (insn & 7) {
        case 0: /* Absolute short. */
        case 1: /* Absolute long. */
        case 2: /* pc displacement */
        case 3: /* pc index+displacement. */
            return gen_ea_once(env, s, insn, opsize, val, addrp, what);
        case 4: /* Immediate. */
            /* Sign extend values for consistency. */
            switch (opsize) {
            case OS_BYTE:
                /* Byte immediates occupy the low byte of an extension
                   word, hence the +1. */
                if (what == EA_LOADS) {
                    offset = cpu_ldsb_code(env, s->pc + 1);
                } else {
                    offset = cpu_ldub_code(env, s->pc + 1);
                }
                s->pc += 2;
                break;
            case OS_WORD:
                if (what == EA_LOADS) {
                    offset = cpu_ldsw_code(env, s->pc);
                } else {
                    offset = cpu_lduw_code(env, s->pc);
                }
                s->pc += 2;
                break;
            case OS_LONG:
                offset = read_im32(env, s);
                break;
            default:
                g_assert_not_reached();
            }
            return tcg_const_i32(offset);
        default:
            return NULL_QREG;
        }
    }
    /* Should never happen. */
    return NULL_QREG;
}
/* This generates a conditional branch, clobbering all temporaries. */
static void gen_jmpcc(DisasContext *s, int cond, TCGLabel *l1)
{
    TCGv tmp;

    /* TODO: Optimize compare/branch pairs rather than always flushing
       flag state to CC_OP_FLAGS. */
    gen_flush_flags(s);
    switch (cond) {
    case 0: /* T */
        tcg_gen_br(l1);
        break;
    case 1: /* F */
        /* Never taken: fall straight through. */
        break;
    case 2: /* HI (!C && !Z) */
        tmp = tcg_temp_new();
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C | CCF_Z);
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
        break;
    case 3: /* LS (C || Z) */
        tmp = tcg_temp_new();
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C | CCF_Z);
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
        break;
    case 4: /* CC (!C) */
        tmp = tcg_temp_new();
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C);
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
        break;
    case 5: /* CS (C) */
        tmp = tcg_temp_new();
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_C);
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
        break;
    case 6: /* NE (!Z) */
        tmp = tcg_temp_new();
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_Z);
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
        break;
    case 7: /* EQ (Z) */
        tmp = tcg_temp_new();
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_Z);
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
        break;
    case 8: /* VC (!V) */
        tmp = tcg_temp_new();
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_V);
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
        break;
    case 9: /* VS (V) */
        tmp = tcg_temp_new();
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_V);
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
        break;
    case 10: /* PL (!N) */
        tmp = tcg_temp_new();
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
        break;
    case 11: /* MI (N) */
        tmp = tcg_temp_new();
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
        break;
    case 12: /* GE (!(N ^ V)) */
        /* N ^ V is computed by shifting N down onto V's bit position,
           relying on the flag layout asserted below. */
        tmp = tcg_temp_new();
        assert(CCF_V == (CCF_N >> 2));
        tcg_gen_shri_i32(tmp, QREG_CC_DEST, 2);
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
        tcg_gen_andi_i32(tmp, tmp, CCF_V);
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
        break;
    case 13: /* LT (N ^ V) */
        tmp = tcg_temp_new();
        assert(CCF_V == (CCF_N >> 2));
        tcg_gen_shri_i32(tmp, QREG_CC_DEST, 2);
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
        tcg_gen_andi_i32(tmp, tmp, CCF_V);
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
        break;
    case 14: /* GT (!(Z || (N ^ V))) */
        tmp = tcg_temp_new();
        assert(CCF_V == (CCF_N >> 2));
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
        tcg_gen_shri_i32(tmp, tmp, 2);
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
        tcg_gen_andi_i32(tmp, tmp, CCF_V | CCF_Z);
        tcg_gen_brcondi_i32(TCG_COND_EQ, tmp, 0, l1);
        break;
    case 15: /* LE (Z || (N ^ V)) */
        tmp = tcg_temp_new();
        assert(CCF_V == (CCF_N >> 2));
        tcg_gen_andi_i32(tmp, QREG_CC_DEST, CCF_N);
        tcg_gen_shri_i32(tmp, tmp, 2);
        tcg_gen_xor_i32(tmp, tmp, QREG_CC_DEST);
        tcg_gen_andi_i32(tmp, tmp, CCF_V | CCF_Z);
        tcg_gen_brcondi_i32(TCG_COND_NE, tmp, 0, l1);
        break;
    default:
        /* Should never happen. */
        abort();
    }
}
/* Scc: set the low byte of Dn to 0xff if the condition holds, else 0. */
DISAS_INSN(scc)
{
    TCGLabel *l1;
    int cond;
    TCGv reg;

    l1 = gen_new_label();
    cond = (insn >> 8) & 0xf;
    reg = DREG(insn, 0);
    /* Clear the low byte first, then conditionally skip setting it. */
    tcg_gen_andi_i32(reg, reg, 0xffffff00);
    /* This is safe because we modify the reg directly, with no other values
       live. */
    gen_jmpcc(s, cond ^ 1, l1); /* branch taken when condition is FALSE */
    tcg_gen_ori_i32(reg, reg, 0xff);
    gen_set_label(l1);
}
/* Force a TB lookup after an instruction that changes the CPU state. */
static void gen_lookup_tb(DisasContext *s)
{
    gen_flush_cc_op(s);
    tcg_gen_movi_i32(QREG_PC, s->pc);
    s->is_jmp = DISAS_UPDATE;
}
/* Generate a jump to an immediate address. */
static void gen_jmp_im(DisasContext *s, uint32_t dest)
{
    gen_flush_cc_op(s);
    tcg_gen_movi_i32(QREG_PC, dest);
    s->is_jmp = DISAS_JUMP;
}
/* Generate a jump to the address in qreg DEST. */
static void gen_jmp(DisasContext *s, TCGv dest)
{
    gen_flush_cc_op(s);
    tcg_gen_mov_i32(QREG_PC, dest);
    s->is_jmp = DISAS_JUMP;
}
/* Raise exception NR at guest address WHERE.  PC is updated to WHERE so
   the exception handler sees the faulting instruction's address. */
static void gen_exception(DisasContext *s, uint32_t where, int nr)
{
    /* Note: gen_jmp_im() flushes cc_op as well; this extra flush is
       redundant but harmless. */
    gen_flush_cc_op(s);
    gen_jmp_im(s, where);
    gen_helper_raise_exception(cpu_env, tcg_const_i32(nr));
}
/* Raise an address error at the start of the current instruction. */
static inline void gen_addr_fault(DisasContext *s)
{
    gen_exception(s, s->insn_pc, EXCP_ADDRESS);
}
/* Load the source operand described by INSN's effective-address field into
   RESULT.  NOTE: on an invalid addressing mode this raises an address
   fault and RETURNS from the enclosing DISAS_INSN function. */
#define SRC_EA(env, result, opsize, op_sign, addrp) do { \
    result = gen_ea(env, s, insn, opsize, NULL_QREG, addrp, \
                    op_sign ? EA_LOADS : EA_LOADU); \
    if (IS_NULL_QREG(result)) { \
        gen_addr_fault(s); \
        return; \
    } \
    } while (0)
/* Store VAL to the effective address described by INSN.  NOTE: on an
   invalid addressing mode this raises an address fault and RETURNS from
   the enclosing DISAS_INSN function. */
#define DEST_EA(env, insn, opsize, val, addrp) do { \
    TCGv ea_result = gen_ea(env, s, insn, opsize, val, addrp, EA_STORE); \
    if (IS_NULL_QREG(ea_result)) { \
        gen_addr_fault(s); \
        return; \
    } \
    } while (0)
/* Generate a direct jump to immediate address DEST, chaining to TB slot N
   when the target lies on the same page as this TB. */
static void gen_jmp_tb(DisasContext *s, int n, uint32_t dest)
{
    TranslationBlock *tb;

    tb = s->tb;
    if (unlikely(s->singlestep_enabled)) {
        /* Single-stepping: always take the slow path via a debug trap. */
        gen_exception(s, dest, EXCP_DEBUG);
    } else if ((tb->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK) ||
               (s->pc & TARGET_PAGE_MASK) == (dest & TARGET_PAGE_MASK)) {
        /* Same-page target: use direct TB chaining. */
        tcg_gen_goto_tb(n);
        tcg_gen_movi_i32(QREG_PC, dest);
        tcg_gen_exit_tb((uintptr_t)tb + n);
    } else {
        /* Cross-page target: exit to the main loop without chaining. */
        gen_jmp_im(s, dest);
        tcg_gen_exit_tb(0);
    }
    s->is_jmp = DISAS_TB_JUMP;
}
/* Unimplemented MAC instruction: raise a line-A exception. */
DISAS_INSN(undef_mac)
{
    gen_exception(s, s->pc - 2, EXCP_LINEA);
}
/* Unimplemented FPU instruction: raise a line-F exception. */
DISAS_INSN(undef_fpu)
{
    gen_exception(s, s->pc - 2, EXCP_LINEF);
}
/* Completely undecodable opcode.  Note this aborts QEMU (after emitting
   the exception) rather than continuing translation. */
DISAS_INSN(undef)
{
    M68kCPU *cpu = m68k_env_get_cpu(env);

    gen_exception(s, s->pc - 2, EXCP_UNSUPPORTED);
    cpu_abort(CPU(cpu), "Illegal instruction: %04x @ %08x", insn, s->pc - 2);
}
  815. DISAS_INSN(mulw)
  816. {
  817. TCGv reg;
  818. TCGv tmp;
  819. TCGv src;
  820. int sign;
  821. sign = (insn & 0x100) != 0;
  822. reg = DREG(insn, 9);
  823. tmp = tcg_temp_new();
  824. if (sign)
  825. tcg_gen_ext16s_i32(tmp, reg);
  826. else
  827. tcg_gen_ext16u_i32(tmp, reg);
  828. SRC_EA(env, src, OS_WORD, sign, NULL);
  829. tcg_gen_mul_i32(tmp, tmp, src);
  830. tcg_gen_mov_i32(reg, tmp);
  831. /* Unlike m68k, coldfire always clears the overflow bit. */
  832. gen_logic_cc(s, tmp);
  833. }
/* 32/16 divide (divs.w / divu.w, selected by bit 8).  The result packs
   the remainder in the high word and the quotient in the low word. */
DISAS_INSN(divw)
{
    TCGv reg;
    TCGv tmp;
    TCGv src;
    int sign;

    sign = (insn & 0x100) != 0;
    reg = DREG(insn, 9);
    if (sign) {
        tcg_gen_ext16s_i32(QREG_DIV1, reg);
    } else {
        tcg_gen_ext16u_i32(QREG_DIV1, reg);
    }
    SRC_EA(env, src, OS_WORD, sign, NULL);
    tcg_gen_mov_i32(QREG_DIV2, src);
    /* The helper argument selects the 16-bit (1) variant; the helper also
       sets the condition flags. */
    if (sign) {
        gen_helper_divs(cpu_env, tcg_const_i32(1));
    } else {
        gen_helper_divu(cpu_env, tcg_const_i32(1));
    }
    /* Repack: quotient (DIV1) in the low word, remainder (DIV2) above. */
    tmp = tcg_temp_new();
    src = tcg_temp_new();
    tcg_gen_ext16u_i32(tmp, QREG_DIV1);
    tcg_gen_shli_i32(src, QREG_DIV2, 16);
    tcg_gen_or_i32(reg, tmp, src);
    s->cc_op = CC_OP_FLAGS;
}
/* 32-bit divide (divs.l / divu.l / rems.l / remu.l). */
DISAS_INSN(divl)
{
    TCGv num;
    TCGv den;
    TCGv reg;
    uint16_t ext;

    ext = cpu_lduw_code(env, s->pc);
    s->pc += 2;
    if (ext & 0x87f8) {
        /* Extension-word encodings outside the supported subset. */
        gen_exception(s, s->pc - 4, EXCP_UNSUPPORTED);
        return;
    }
    num = DREG(ext, 12);
    reg = DREG(ext, 0);
    tcg_gen_mov_i32(QREG_DIV1, num);
    SRC_EA(env, den, OS_LONG, 0, NULL);
    tcg_gen_mov_i32(QREG_DIV2, den);
    /* Bit 11 of the extension word selects signed divide. */
    if (ext & 0x0800) {
        gen_helper_divs(cpu_env, tcg_const_i32(0));
    } else {
        gen_helper_divu(cpu_env, tcg_const_i32(0));
    }
    /* Same register in both fields means quotient, otherwise remainder. */
    if ((ext & 7) == ((ext >> 12) & 7)) {
        /* div */
        tcg_gen_mov_i32(reg, QREG_DIV1);
    } else {
        /* rem */
        tcg_gen_mov_i32(reg, QREG_DIV2);
    }
    s->cc_op = CC_OP_FLAGS;
}
/* add / sub with one register and one <ea> operand.  Bit 14 selects add
   vs sub; bit 8 selects the direction (Dn op <ea> -> <ea>, or
   <ea> op Dn -> Dn). */
DISAS_INSN(addsub)
{
    TCGv reg;
    TCGv dest;
    TCGv src;
    TCGv tmp;
    TCGv addr;
    int add;

    add = (insn & 0x4000) != 0;
    reg = DREG(insn, 9);
    dest = tcg_temp_new();
    if (insn & 0x100) {
        /* <ea> is the destination; keep its address for the writeback. */
        SRC_EA(env, tmp, OS_LONG, 0, &addr);
        src = reg;
    } else {
        tmp = reg;
        SRC_EA(env, src, OS_LONG, 0, NULL);
    }
    if (add) {
        tcg_gen_add_i32(dest, tmp, src);
        /* X is set iff the (unsigned) result wrapped below the addend. */
        gen_helper_xflag_lt(QREG_CC_X, dest, src);
        s->cc_op = CC_OP_ADD;
    } else {
        /* X is set iff a borrow occurs, i.e. minuend < subtrahend. */
        gen_helper_xflag_lt(QREG_CC_X, tmp, src);
        tcg_gen_sub_i32(dest, tmp, src);
        s->cc_op = CC_OP_SUB;
    }
    gen_update_cc_add(dest, src);
    if (insn & 0x100) {
        DEST_EA(env, insn, OS_LONG, dest, &addr);
    } else {
        tcg_gen_mov_i32(reg, dest);
    }
}
  926. /* Reverse the order of the bits in REG. */
  927. DISAS_INSN(bitrev)
  928. {
  929. TCGv reg;
  930. reg = DREG(insn, 0);
  931. gen_helper_bitrev(reg, reg);
  932. }
/* btst/bchg/bclr/bset with the bit number in a data register.  Memory
   operands are byte-sized (bit number mod 8), register operands are
   long (mod 32). */
DISAS_INSN(bitop_reg)
{
    int opsize;
    int op;
    TCGv src1;
    TCGv src2;
    TCGv tmp;
    TCGv addr;
    TCGv dest;

    if ((insn & 0x38) != 0)
        opsize = OS_BYTE;
    else
        opsize = OS_LONG;
    op = (insn >> 6) & 3; /* 0=btst 1=bchg 2=bclr 3=bset */
    SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);
    src2 = DREG(insn, 9);
    dest = tcg_temp_new();

    gen_flush_flags(s);
    tmp = tcg_temp_new();
    if (opsize == OS_BYTE)
        tcg_gen_andi_i32(tmp, src2, 7);
    else
        tcg_gen_andi_i32(tmp, src2, 31);
    src2 = tmp;
    tmp = tcg_temp_new();
    /* Extract the tested bit and shift it onto the Z flag position. */
    tcg_gen_shr_i32(tmp, src1, src2);
    tcg_gen_andi_i32(tmp, tmp, 1);
    tcg_gen_shli_i32(tmp, tmp, 2);
    /* Clear CCF_Z if bit set. */
    tcg_gen_ori_i32(QREG_CC_DEST, QREG_CC_DEST, CCF_Z);
    tcg_gen_xor_i32(QREG_CC_DEST, QREG_CC_DEST, tmp);

    tcg_gen_shl_i32(tmp, tcg_const_i32(1), src2);
    switch (op) {
    case 1: /* bchg */
        tcg_gen_xor_i32(dest, src1, tmp);
        break;
    case 2: /* bclr */
        tcg_gen_not_i32(tmp, tmp);
        tcg_gen_and_i32(dest, src1, tmp);
        break;
    case 3: /* bset */
        tcg_gen_or_i32(dest, src1, tmp);
        break;
    default: /* btst */
        break;
    }
    if (op)
        DEST_EA(env, insn, opsize, dest, &addr);
}
  982. DISAS_INSN(sats)
  983. {
  984. TCGv reg;
  985. reg = DREG(insn, 0);
  986. gen_flush_flags(s);
  987. gen_helper_sats(reg, reg, QREG_CC_DEST);
  988. gen_logic_cc(s, reg);
  989. }
  990. static void gen_push(DisasContext *s, TCGv val)
  991. {
  992. TCGv tmp;
  993. tmp = tcg_temp_new();
  994. tcg_gen_subi_i32(tmp, QREG_SP, 4);
  995. gen_store(s, OS_LONG, tmp, val);
  996. tcg_gen_mov_i32(QREG_SP, tmp);
  997. }
/* movem: load or store a set of registers described by a mask word.
   Bit 10 of the opcode selects memory-to-registers. */
DISAS_INSN(movem)
{
    TCGv addr;
    int i;
    uint16_t mask;
    TCGv reg;
    TCGv tmp;
    int is_load;

    mask = cpu_lduw_code(env, s->pc);
    s->pc += 2;
    tmp = gen_lea(env, s, insn, OS_LONG);
    if (IS_NULL_QREG(tmp)) {
        gen_addr_fault(s);
        return;
    }
    addr = tcg_temp_new();
    tcg_gen_mov_i32(addr, tmp);
    is_load = ((insn & 0x0400) != 0);
    /* Mask bits 0-7 are D0-D7, bits 8-15 are A0-A7. */
    for (i = 0; i < 16; i++, mask >>= 1) {
        if (mask & 1) {
            if (i < 8)
                reg = DREG(i, 0);
            else
                reg = AREG(i, 0);
            if (is_load) {
                tmp = gen_load(s, OS_LONG, addr, 0);
                tcg_gen_mov_i32(reg, tmp);
            } else {
                gen_store(s, OS_LONG, addr, reg);
            }
            /* Skip the final increment once the last register is done. */
            if (mask != 1)
                tcg_gen_addi_i32(addr, addr, 4);
        }
    }
}
/* btst/bchg/bclr/bset with an immediate bit number. */
DISAS_INSN(bitop_im)
{
    int opsize;
    int op;
    TCGv src1;
    uint32_t mask;
    int bitnum;
    TCGv tmp;
    TCGv addr;

    if ((insn & 0x38) != 0)
        opsize = OS_BYTE;
    else
        opsize = OS_LONG;
    op = (insn >> 6) & 3; /* 0=btst 1=bchg 2=bclr 3=bset */

    bitnum = cpu_lduw_code(env, s->pc);
    s->pc += 2;
    if (bitnum & 0xff00) {
        /* The high byte of the immediate must be zero. */
        disas_undef(env, s, insn);
        return;
    }

    SRC_EA(env, src1, opsize, 0, op ? &addr: NULL);

    gen_flush_flags(s);
    if (opsize == OS_BYTE)
        bitnum &= 7;
    else
        bitnum &= 31;
    mask = 1 << bitnum;

    /* Align the tested bit with the Z flag's bit position (bit 2). */
    tmp = tcg_temp_new();
    assert (CCF_Z == (1 << 2));
    if (bitnum > 2)
        tcg_gen_shri_i32(tmp, src1, bitnum - 2);
    else if (bitnum < 2)
        tcg_gen_shli_i32(tmp, src1, 2 - bitnum);
    else
        tcg_gen_mov_i32(tmp, src1);
    tcg_gen_andi_i32(tmp, tmp, CCF_Z);
    /* Clear CCF_Z if bit set. */
    tcg_gen_ori_i32(QREG_CC_DEST, QREG_CC_DEST, CCF_Z);
    tcg_gen_xor_i32(QREG_CC_DEST, QREG_CC_DEST, tmp);
    if (op) {
        switch (op) {
        case 1: /* bchg */
            tcg_gen_xori_i32(tmp, src1, mask);
            break;
        case 2: /* bclr */
            tcg_gen_andi_i32(tmp, src1, ~mask);
            break;
        case 3: /* bset */
            tcg_gen_ori_i32(tmp, src1, mask);
            break;
        default: /* btst */
            break;
        }
        DEST_EA(env, insn, opsize, tmp, &addr);
    }
}
/* Arithmetic/logic with a 32-bit immediate: ori/andi/subi/addi/eori/cmpi.
   Bits 9-11 of the opcode select the operation; cmpi (6) has no
   writeback. */
DISAS_INSN(arith_im)
{
    int op;
    uint32_t im;
    TCGv src1;
    TCGv dest;
    TCGv addr;

    op = (insn >> 9) & 7;
    SRC_EA(env, src1, OS_LONG, 0, (op == 6) ? NULL : &addr);
    im = read_im32(env, s);
    dest = tcg_temp_new();
    switch (op) {
    case 0: /* ori */
        tcg_gen_ori_i32(dest, src1, im);
        gen_logic_cc(s, dest);
        break;
    case 1: /* andi */
        tcg_gen_andi_i32(dest, src1, im);
        gen_logic_cc(s, dest);
        break;
    case 2: /* subi */
        tcg_gen_mov_i32(dest, src1);
        /* X = borrow: computed before the subtraction clobbers dest. */
        gen_helper_xflag_lt(QREG_CC_X, dest, tcg_const_i32(im));
        tcg_gen_subi_i32(dest, dest, im);
        gen_update_cc_add(dest, tcg_const_i32(im));
        s->cc_op = CC_OP_SUB;
        break;
    case 3: /* addi */
        tcg_gen_mov_i32(dest, src1);
        tcg_gen_addi_i32(dest, dest, im);
        gen_update_cc_add(dest, tcg_const_i32(im));
        /* X = carry: result (unsigned) less than the addend. */
        gen_helper_xflag_lt(QREG_CC_X, dest, tcg_const_i32(im));
        s->cc_op = CC_OP_ADD;
        break;
    case 5: /* eori */
        tcg_gen_xori_i32(dest, src1, im);
        gen_logic_cc(s, dest);
        break;
    case 6: /* cmpi */
        tcg_gen_mov_i32(dest, src1);
        tcg_gen_subi_i32(dest, dest, im);
        gen_update_cc_add(dest, tcg_const_i32(im));
        s->cc_op = CC_OP_SUB;
        break;
    default:
        abort();
    }
    if (op != 6) {
        DEST_EA(env, insn, OS_LONG, dest, &addr);
    }
}
  1140. DISAS_INSN(byterev)
  1141. {
  1142. TCGv reg;
  1143. reg = DREG(insn, 0);
  1144. tcg_gen_bswap32_i32(reg, reg);
  1145. }
/* move / movea.  Bits 12-13 encode the size (1=byte, 2=long, 3=word). */
DISAS_INSN(move)
{
    TCGv src;
    TCGv dest;
    int op;
    int opsize;

    switch (insn >> 12) {
    case 1: /* move.b */
        opsize = OS_BYTE;
        break;
    case 2: /* move.l */
        opsize = OS_LONG;
        break;
    case 3: /* move.w */
        opsize = OS_WORD;
        break;
    default:
        abort();
    }
    SRC_EA(env, src, opsize, 1, NULL);
    op = (insn >> 6) & 7;
    if (op == 1) {
        /* movea */
        /* The value will already have been sign extended. */
        dest = AREG(insn, 9);
        tcg_gen_mov_i32(dest, src);
    } else {
        /* normal move */
        uint16_t dest_ea;
        /* Reassemble the destination EA: mode/register fields are swapped
           relative to the source encoding. */
        dest_ea = ((insn >> 9) & 7) | (op << 3);
        DEST_EA(env, dest_ea, opsize, src, NULL);
        /* This will be correct because loads sign extend.  */
        gen_logic_cc(s, src);
    }
}
/* negx: negate with extend, i.e. 0 - Dn - X. */
DISAS_INSN(negx)
{
    TCGv reg;

    /* The helper reads the live X flag, so flush first. */
    gen_flush_flags(s);
    reg = DREG(insn, 0);
    gen_helper_subx_cc(reg, cpu_env, tcg_const_i32(0), reg);
    /* NOTE(review): unlike addx, no explicit s->cc_op = CC_OP_FLAGS here;
       presumably gen_flush_flags() already left cc_op in that state --
       confirm against gen_flush_flags. */
}
  1188. DISAS_INSN(lea)
  1189. {
  1190. TCGv reg;
  1191. TCGv tmp;
  1192. reg = AREG(insn, 9);
  1193. tmp = gen_lea(env, s, insn, OS_LONG);
  1194. if (IS_NULL_QREG(tmp)) {
  1195. gen_addr_fault(s);
  1196. return;
  1197. }
  1198. tcg_gen_mov_i32(reg, tmp);
  1199. }
  1200. DISAS_INSN(clr)
  1201. {
  1202. int opsize;
  1203. switch ((insn >> 6) & 3) {
  1204. case 0: /* clr.b */
  1205. opsize = OS_BYTE;
  1206. break;
  1207. case 1: /* clr.w */
  1208. opsize = OS_WORD;
  1209. break;
  1210. case 2: /* clr.l */
  1211. opsize = OS_LONG;
  1212. break;
  1213. default:
  1214. abort();
  1215. }
  1216. DEST_EA(env, insn, opsize, tcg_const_i32(0), NULL);
  1217. gen_logic_cc(s, tcg_const_i32(0));
  1218. }
  1219. static TCGv gen_get_ccr(DisasContext *s)
  1220. {
  1221. TCGv dest;
  1222. gen_flush_flags(s);
  1223. dest = tcg_temp_new();
  1224. tcg_gen_shli_i32(dest, QREG_CC_X, 4);
  1225. tcg_gen_or_i32(dest, dest, QREG_CC_DEST);
  1226. return dest;
  1227. }
  1228. DISAS_INSN(move_from_ccr)
  1229. {
  1230. TCGv reg;
  1231. TCGv ccr;
  1232. ccr = gen_get_ccr(s);
  1233. reg = DREG(insn, 0);
  1234. gen_partset_reg(OS_WORD, reg, ccr);
  1235. }
  1236. DISAS_INSN(neg)
  1237. {
  1238. TCGv reg;
  1239. TCGv src1;
  1240. reg = DREG(insn, 0);
  1241. src1 = tcg_temp_new();
  1242. tcg_gen_mov_i32(src1, reg);
  1243. tcg_gen_neg_i32(reg, src1);
  1244. s->cc_op = CC_OP_SUB;
  1245. gen_update_cc_add(reg, src1);
  1246. gen_helper_xflag_lt(QREG_CC_X, tcg_const_i32(0), src1);
  1247. s->cc_op = CC_OP_SUB;
  1248. }
/* Set the status register from the immediate VAL.  Bits 0-3 are the NZVC
   flags, bit 4 is X; the supervisor byte is only written when !CCR_ONLY. */
static void gen_set_sr_im(DisasContext *s, uint16_t val, int ccr_only)
{
    tcg_gen_movi_i32(QREG_CC_DEST, val & 0xf);
    tcg_gen_movi_i32(QREG_CC_X, (val & 0x10) >> 4);
    if (!ccr_only) {
        gen_helper_set_sr(cpu_env, tcg_const_i32(val & 0xff00));
    }
}
  1257. static void gen_set_sr(CPUM68KState *env, DisasContext *s, uint16_t insn,
  1258. int ccr_only)
  1259. {
  1260. TCGv tmp;
  1261. TCGv reg;
  1262. s->cc_op = CC_OP_FLAGS;
  1263. if ((insn & 0x38) == 0)
  1264. {
  1265. tmp = tcg_temp_new();
  1266. reg = DREG(insn, 0);
  1267. tcg_gen_andi_i32(QREG_CC_DEST, reg, 0xf);
  1268. tcg_gen_shri_i32(tmp, reg, 4);
  1269. tcg_gen_andi_i32(QREG_CC_X, tmp, 1);
  1270. if (!ccr_only) {
  1271. gen_helper_set_sr(cpu_env, reg);
  1272. }
  1273. }
  1274. else if ((insn & 0x3f) == 0x3c)
  1275. {
  1276. uint16_t val;
  1277. val = cpu_lduw_code(env, s->pc);
  1278. s->pc += 2;
  1279. gen_set_sr_im(s, val, ccr_only);
  1280. }
  1281. else
  1282. disas_undef(env, s, insn);
  1283. }
/* move to CCR: set only the condition codes, not the supervisor byte. */
DISAS_INSN(move_to_ccr)
{
    gen_set_sr(env, s, insn, 1);
}
  1288. DISAS_INSN(not)
  1289. {
  1290. TCGv reg;
  1291. reg = DREG(insn, 0);
  1292. tcg_gen_not_i32(reg, reg);
  1293. gen_logic_cc(s, reg);
  1294. }
  1295. DISAS_INSN(swap)
  1296. {
  1297. TCGv src1;
  1298. TCGv src2;
  1299. TCGv reg;
  1300. src1 = tcg_temp_new();
  1301. src2 = tcg_temp_new();
  1302. reg = DREG(insn, 0);
  1303. tcg_gen_shli_i32(src1, reg, 16);
  1304. tcg_gen_shri_i32(src2, reg, 16);
  1305. tcg_gen_or_i32(reg, src1, src2);
  1306. gen_logic_cc(s, reg);
  1307. }
  1308. DISAS_INSN(pea)
  1309. {
  1310. TCGv tmp;
  1311. tmp = gen_lea(env, s, insn, OS_LONG);
  1312. if (IS_NULL_QREG(tmp)) {
  1313. gen_addr_fault(s);
  1314. return;
  1315. }
  1316. gen_push(s, tmp);
  1317. }
  1318. DISAS_INSN(ext)
  1319. {
  1320. int op;
  1321. TCGv reg;
  1322. TCGv tmp;
  1323. reg = DREG(insn, 0);
  1324. op = (insn >> 6) & 7;
  1325. tmp = tcg_temp_new();
  1326. if (op == 3)
  1327. tcg_gen_ext16s_i32(tmp, reg);
  1328. else
  1329. tcg_gen_ext8s_i32(tmp, reg);
  1330. if (op == 2)
  1331. gen_partset_reg(OS_WORD, reg, tmp);
  1332. else
  1333. tcg_gen_mov_i32(reg, tmp);
  1334. gen_logic_cc(s, tmp);
  1335. }
  1336. DISAS_INSN(tst)
  1337. {
  1338. int opsize;
  1339. TCGv tmp;
  1340. switch ((insn >> 6) & 3) {
  1341. case 0: /* tst.b */
  1342. opsize = OS_BYTE;
  1343. break;
  1344. case 1: /* tst.w */
  1345. opsize = OS_WORD;
  1346. break;
  1347. case 2: /* tst.l */
  1348. opsize = OS_LONG;
  1349. break;
  1350. default:
  1351. abort();
  1352. }
  1353. SRC_EA(env, tmp, opsize, 1, NULL);
  1354. gen_logic_cc(s, tmp);
  1355. }
DISAS_INSN(pulse)
{
    /* Implemented as a NOP. */
}
/* The ILLEGAL instruction: raise an illegal-instruction exception. */
DISAS_INSN(illegal)
{
    gen_exception(s, s->pc - 2, EXCP_ILLEGAL);
}
/* ??? This should be atomic.  */
/* tas: test the byte operand and set its top bit. */
DISAS_INSN(tas)
{
    TCGv dest;
    TCGv src1;
    TCGv addr;

    dest = tcg_temp_new();
    SRC_EA(env, src1, OS_BYTE, 1, &addr);
    /* Flags reflect the value before bit 7 is set. */
    gen_logic_cc(s, src1);
    tcg_gen_ori_i32(dest, src1, 0x80);
    DEST_EA(env, insn, OS_BYTE, dest, &addr);
}
/* muls.l / mulu.l. */
DISAS_INSN(mull)
{
    uint16_t ext;
    TCGv reg;
    TCGv src1;
    TCGv dest;

    /* The upper 32 bits of the product are discarded, so
       muls.l and mulu.l are functionally equivalent.  */
    ext = cpu_lduw_code(env, s->pc);
    s->pc += 2;
    if (ext & 0x87ff) {
        /* Only the plain 32-bit form of the extension word is handled. */
        gen_exception(s, s->pc - 4, EXCP_UNSUPPORTED);
        return;
    }
    reg = DREG(ext, 12);
    SRC_EA(env, src1, OS_LONG, 0, NULL);
    dest = tcg_temp_new();
    tcg_gen_mul_i32(dest, src1, reg);
    tcg_gen_mov_i32(reg, dest);
    /* Unlike m68k, coldfire always clears the overflow bit.  */
    gen_logic_cc(s, dest);
}
/* link: push An, set An to the new SP, then allocate the stack frame. */
DISAS_INSN(link)
{
    int16_t offset;
    TCGv reg;
    TCGv tmp;

    offset = cpu_ldsw_code(env, s->pc);
    s->pc += 2;
    reg = AREG(insn, 0);
    tmp = tcg_temp_new();
    tcg_gen_subi_i32(tmp, QREG_SP, 4);
    gen_store(s, OS_LONG, tmp, reg);
    /* When An is SP itself (register 7), skip the frame-pointer update. */
    if ((insn & 7) != 7)
        tcg_gen_mov_i32(reg, tmp);
    /* Offset is normally negative, growing the frame downwards. */
    tcg_gen_addi_i32(QREG_SP, tmp, offset);
}
  1413. DISAS_INSN(unlk)
  1414. {
  1415. TCGv src;
  1416. TCGv reg;
  1417. TCGv tmp;
  1418. src = tcg_temp_new();
  1419. reg = AREG(insn, 0);
  1420. tcg_gen_mov_i32(src, reg);
  1421. tmp = gen_load(s, OS_LONG, src, 0);
  1422. tcg_gen_mov_i32(reg, tmp);
  1423. tcg_gen_addi_i32(QREG_SP, src, 4);
  1424. }
/* nop: no operation. */
DISAS_INSN(nop)
{
}
  1428. DISAS_INSN(rts)
  1429. {
  1430. TCGv tmp;
  1431. tmp = gen_load(s, OS_LONG, QREG_SP, 0);
  1432. tcg_gen_addi_i32(QREG_SP, QREG_SP, 4);
  1433. gen_jmp(s, tmp);
  1434. }
/* jmp / jsr (bit 6 clear means jsr). */
DISAS_INSN(jump)
{
    TCGv tmp;

    /* Load the target address first to ensure correct exception
       behavior.  */
    tmp = gen_lea(env, s, insn, OS_LONG);
    if (IS_NULL_QREG(tmp)) {
        gen_addr_fault(s);
        return;
    }
    if ((insn & 0x40) == 0) {
        /* jsr: push the return address (the next instruction). */
        gen_push(s, tcg_const_i32(s->pc));
    }
    gen_jmp(s, tmp);
}
/* addq / subq: add or subtract a quick immediate (1-8; field value 0
   encodes 8).  Bit 8 selects subtract. */
DISAS_INSN(addsubq)
{
    TCGv src1;
    TCGv src2;
    TCGv dest;
    int val;
    TCGv addr;

    SRC_EA(env, src1, OS_LONG, 0, &addr);
    val = (insn >> 9) & 7;
    if (val == 0)
        val = 8;
    dest = tcg_temp_new();
    tcg_gen_mov_i32(dest, src1);
    if ((insn & 0x38) == 0x08) {
        /* Don't update condition codes if the destination is an
           address register.  */
        if (insn & 0x0100) {
            tcg_gen_subi_i32(dest, dest, val);
        } else {
            tcg_gen_addi_i32(dest, dest, val);
        }
    } else {
        src2 = tcg_const_i32(val);
        if (insn & 0x0100) {
            /* X = borrow: tested before the subtraction clobbers dest. */
            gen_helper_xflag_lt(QREG_CC_X, dest, src2);
            tcg_gen_subi_i32(dest, dest, val);
            s->cc_op = CC_OP_SUB;
        } else {
            tcg_gen_addi_i32(dest, dest, val);
            /* X = carry: result wrapped below the addend. */
            gen_helper_xflag_lt(QREG_CC_X, dest, src2);
            s->cc_op = CC_OP_ADD;
        }
        gen_update_cc_add(dest, src2);
    }
    DEST_EA(env, insn, OS_LONG, dest, &addr);
}
  1487. DISAS_INSN(tpf)
  1488. {
  1489. switch (insn & 7) {
  1490. case 2: /* One extension word. */
  1491. s->pc += 2;
  1492. break;
  1493. case 3: /* Two extension words. */
  1494. s->pc += 4;
  1495. break;
  1496. case 4: /* No extension words. */
  1497. break;
  1498. default:
  1499. disas_undef(env, s, insn);
  1500. }
  1501. }
/* Bcc / bra / bsr.  The 8-bit displacement in the opcode selects the
   extension size: 0 means a 16-bit displacement follows, -1 (0xff) a
   32-bit one. */
DISAS_INSN(branch)
{
    int32_t offset;
    uint32_t base;
    int op;
    TCGLabel *l1;

    /* Displacements are relative to the end of the 16-bit opcode. */
    base = s->pc;
    op = (insn >> 8) & 0xf;
    offset = (int8_t)insn;
    if (offset == 0) {
        offset = cpu_ldsw_code(env, s->pc);
        s->pc += 2;
    } else if (offset == -1) {
        offset = read_im32(env, s);
    }
    if (op == 1) {
        /* bsr */
        gen_push(s, tcg_const_i32(s->pc));
    }
    gen_flush_cc_op(s);
    if (op > 1) {
        /* Bcc */
        /* Invert the condition: the label is the not-taken fallthrough. */
        l1 = gen_new_label();
        gen_jmpcc(s, ((insn >> 8) & 0xf) ^ 1, l1);
        gen_jmp_tb(s, 1, base + offset);
        gen_set_label(l1);
        gen_jmp_tb(s, 0, s->pc);
    } else {
        /* Unconditional branch.  */
        gen_jmp_tb(s, 0, base + offset);
    }
}
  1534. DISAS_INSN(moveq)
  1535. {
  1536. uint32_t val;
  1537. val = (int8_t)insn;
  1538. tcg_gen_movi_i32(DREG(insn, 9), val);
  1539. gen_logic_cc(s, tcg_const_i32(val));
  1540. }
  1541. DISAS_INSN(mvzs)
  1542. {
  1543. int opsize;
  1544. TCGv src;
  1545. TCGv reg;
  1546. if (insn & 0x40)
  1547. opsize = OS_WORD;
  1548. else
  1549. opsize = OS_BYTE;
  1550. SRC_EA(env, src, opsize, (insn & 0x80) == 0, NULL);
  1551. reg = DREG(insn, 9);
  1552. tcg_gen_mov_i32(reg, src);
  1553. gen_logic_cc(s, src);
  1554. }
  1555. DISAS_INSN(or)
  1556. {
  1557. TCGv reg;
  1558. TCGv dest;
  1559. TCGv src;
  1560. TCGv addr;
  1561. reg = DREG(insn, 9);
  1562. dest = tcg_temp_new();
  1563. if (insn & 0x100) {
  1564. SRC_EA(env, src, OS_LONG, 0, &addr);
  1565. tcg_gen_or_i32(dest, src, reg);
  1566. DEST_EA(env, insn, OS_LONG, dest, &addr);
  1567. } else {
  1568. SRC_EA(env, src, OS_LONG, 0, NULL);
  1569. tcg_gen_or_i32(dest, src, reg);
  1570. tcg_gen_mov_i32(reg, dest);
  1571. }
  1572. gen_logic_cc(s, dest);
  1573. }
  1574. DISAS_INSN(suba)
  1575. {
  1576. TCGv src;
  1577. TCGv reg;
  1578. SRC_EA(env, src, OS_LONG, 0, NULL);
  1579. reg = AREG(insn, 9);
  1580. tcg_gen_sub_i32(reg, reg, src);
  1581. }
/* subx: subtract with extend, Dy - Dx - X. */
DISAS_INSN(subx)
{
    TCGv reg;
    TCGv src;

    /* The helper reads the live X flag, so flush first. */
    gen_flush_flags(s);
    reg = DREG(insn, 9);
    src = DREG(insn, 0);
    gen_helper_subx_cc(reg, cpu_env, reg, src);
    /* NOTE(review): unlike addx, no explicit s->cc_op = CC_OP_FLAGS here;
       presumably gen_flush_flags() already left cc_op in that state --
       confirm against gen_flush_flags. */
}
  1591. DISAS_INSN(mov3q)
  1592. {
  1593. TCGv src;
  1594. int val;
  1595. val = (insn >> 9) & 7;
  1596. if (val == 0)
  1597. val = -1;
  1598. src = tcg_const_i32(val);
  1599. gen_logic_cc(s, src);
  1600. DEST_EA(env, insn, OS_LONG, src, NULL);
  1601. }
/* cmp.b / cmp.w / cmp.l: compare <ea> against Dn without writeback. */
DISAS_INSN(cmp)
{
    int op;
    TCGv src;
    TCGv reg;
    TCGv dest;
    int opsize;

    op = (insn >> 6) & 3;
    switch (op) {
    case 0: /* cmp.b */
        opsize = OS_BYTE;
        s->cc_op = CC_OP_CMPB;
        break;
    case 1: /* cmp.w */
        opsize = OS_WORD;
        s->cc_op = CC_OP_CMPW;
        break;
    case 2: /* cmp.l */
        opsize = OS_LONG;
        s->cc_op = CC_OP_SUB;
        break;
    default:
        abort();
    }
    SRC_EA(env, src, opsize, 1, NULL);
    reg = DREG(insn, 9);
    dest = tcg_temp_new();
    /* Only the flag inputs are recorded; dest is discarded. */
    tcg_gen_sub_i32(dest, reg, src);
    gen_update_cc_add(dest, src);
}
  1632. DISAS_INSN(cmpa)
  1633. {
  1634. int opsize;
  1635. TCGv src;
  1636. TCGv reg;
  1637. TCGv dest;
  1638. if (insn & 0x100) {
  1639. opsize = OS_LONG;
  1640. } else {
  1641. opsize = OS_WORD;
  1642. }
  1643. SRC_EA(env, src, opsize, 1, NULL);
  1644. reg = AREG(insn, 9);
  1645. dest = tcg_temp_new();
  1646. tcg_gen_sub_i32(dest, reg, src);
  1647. gen_update_cc_add(dest, src);
  1648. s->cc_op = CC_OP_SUB;
  1649. }
  1650. DISAS_INSN(eor)
  1651. {
  1652. TCGv src;
  1653. TCGv reg;
  1654. TCGv dest;
  1655. TCGv addr;
  1656. SRC_EA(env, src, OS_LONG, 0, &addr);
  1657. reg = DREG(insn, 9);
  1658. dest = tcg_temp_new();
  1659. tcg_gen_xor_i32(dest, src, reg);
  1660. gen_logic_cc(s, dest);
  1661. DEST_EA(env, insn, OS_LONG, dest, &addr);
  1662. }
  1663. DISAS_INSN(and)
  1664. {
  1665. TCGv src;
  1666. TCGv reg;
  1667. TCGv dest;
  1668. TCGv addr;
  1669. reg = DREG(insn, 9);
  1670. dest = tcg_temp_new();
  1671. if (insn & 0x100) {
  1672. SRC_EA(env, src, OS_LONG, 0, &addr);
  1673. tcg_gen_and_i32(dest, src, reg);
  1674. DEST_EA(env, insn, OS_LONG, dest, &addr);
  1675. } else {
  1676. SRC_EA(env, src, OS_LONG, 0, NULL);
  1677. tcg_gen_and_i32(dest, src, reg);
  1678. tcg_gen_mov_i32(reg, dest);
  1679. }
  1680. gen_logic_cc(s, dest);
  1681. }
  1682. DISAS_INSN(adda)
  1683. {
  1684. TCGv src;
  1685. TCGv reg;
  1686. SRC_EA(env, src, OS_LONG, 0, NULL);
  1687. reg = AREG(insn, 9);
  1688. tcg_gen_add_i32(reg, reg, src);
  1689. }
/* addx: add with extend, Dy + Dx + X. */
DISAS_INSN(addx)
{
    TCGv reg;
    TCGv src;

    /* The helper reads the live X flag, so flush first. */
    gen_flush_flags(s);
    reg = DREG(insn, 9);
    src = DREG(insn, 0);
    gen_helper_addx_cc(reg, cpu_env, reg, src);
    s->cc_op = CC_OP_FLAGS;
}
/* TODO: This could be implemented without helper functions.  */
/* Shift by an immediate count (1-8; field value 0 encodes 8).  Bit 8
   selects left shift; otherwise bit 3 selects logical vs arithmetic
   right shift. */
DISAS_INSN(shift_im)
{
    TCGv reg;
    int tmp;
    TCGv shift;

    reg = DREG(insn, 0);
    tmp = (insn >> 9) & 7;
    if (tmp == 0)
        tmp = 8;
    shift = tcg_const_i32(tmp);
    /* No need to flush flags because we know we will set C flag.  */
    if (insn & 0x100) {
        gen_helper_shl_cc(reg, cpu_env, reg, shift);
    } else {
        if (insn & 8) {
            gen_helper_shr_cc(reg, cpu_env, reg, shift);
        } else {
            gen_helper_sar_cc(reg, cpu_env, reg, shift);
        }
    }
    s->cc_op = CC_OP_SHIFT;
}
  1723. DISAS_INSN(shift_reg)
  1724. {
  1725. TCGv reg;
  1726. TCGv shift;
  1727. reg = DREG(insn, 0);
  1728. shift = DREG(insn, 9);
  1729. /* Shift by zero leaves C flag unmodified. */
  1730. gen_flush_flags(s);
  1731. if (insn & 0x100) {
  1732. gen_helper_shl_cc(reg, cpu_env, reg, shift);
  1733. } else {
  1734. if (insn & 8) {
  1735. gen_helper_shr_cc(reg, cpu_env, reg, shift);
  1736. } else {
  1737. gen_helper_sar_cc(reg, cpu_env, reg, shift);
  1738. }
  1739. }
  1740. s->cc_op = CC_OP_SHIFT;
  1741. }
  1742. DISAS_INSN(ff1)
  1743. {
  1744. TCGv reg;
  1745. reg = DREG(insn, 0);
  1746. gen_logic_cc(s, reg);
  1747. gen_helper_ff1(reg, reg);
  1748. }
  1749. static TCGv gen_get_sr(DisasContext *s)
  1750. {
  1751. TCGv ccr;
  1752. TCGv sr;
  1753. ccr = gen_get_ccr(s);
  1754. sr = tcg_temp_new();
  1755. tcg_gen_andi_i32(sr, QREG_SR, 0xffe0);
  1756. tcg_gen_or_i32(sr, sr, ccr);
  1757. return sr;
  1758. }
/* strldsr: the "store/load SR" pair (move.w SR,-(SP) followed by the
   move.w #imm,SR opcode 0x46FC), executed as one unit. */
DISAS_INSN(strldsr)
{
    uint16_t ext;
    uint32_t addr;

    addr = s->pc - 2;
    ext = cpu_lduw_code(env, s->pc);
    s->pc += 2;
    if (ext != 0x46FC) {
        /* Second opcode is not "move to SR immediate". */
        gen_exception(s, addr, EXCP_UNSUPPORTED);
        return;
    }
    ext = cpu_lduw_code(env, s->pc);
    s->pc += 2;
    /* Privileged, and the new SR value must keep the S bit set. */
    if (IS_USER(s) || (ext & SR_S) == 0) {
        gen_exception(s, addr, EXCP_PRIVILEGE);
        return;
    }
    gen_push(s, gen_get_sr(s));
    gen_set_sr_im(s, ext, 0);
}
  1779. DISAS_INSN(move_from_sr)
  1780. {
  1781. TCGv reg;
  1782. TCGv sr;
  1783. if (IS_USER(s)) {
  1784. gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
  1785. return;
  1786. }
  1787. sr = gen_get_sr(s);
  1788. reg = DREG(insn, 0);
  1789. gen_partset_reg(OS_WORD, reg, sr);
  1790. }
/* move to SR (privileged). */
DISAS_INSN(move_to_sr)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    gen_set_sr(env, s, insn, 0);
    /* SR changes can alter the execution mode; force a TB lookup. */
    gen_lookup_tb(s);
}
/* move USP,An (privileged). */
DISAS_INSN(move_from_usp)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    tcg_gen_ld_i32(AREG(insn, 0), cpu_env,
                   offsetof(CPUM68KState, sp[M68K_USP]));
}
/* move An,USP (privileged). */
DISAS_INSN(move_to_usp)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    tcg_gen_st_i32(AREG(insn, 0), cpu_env,
                   offsetof(CPUM68KState, sp[M68K_USP]));
}
/* halt: stop the CPU via a dedicated internal exception.  Note s->pc
   (the address after this insn) is used, so execution resumes there.  */
DISAS_INSN(halt)
{
    gen_exception(s, s->pc, EXCP_HALT_INSN);
}
/* stop: load the immediate extension word into SR, then halt the CPU
   until the next interrupt.  Privileged.  */
DISAS_INSN(stop)
{
    uint16_t ext;

    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }

    ext = cpu_lduw_code(env, s->pc);
    s->pc += 2;

    gen_set_sr_im(s, ext, 0);
    /* Mark the CPU halted and raise EXCP_HLT so the main loop waits
       for an interrupt; resume at the insn after stop.  */
    tcg_gen_movi_i32(cpu_halted, 1);
    gen_exception(s, s->pc, EXCP_HLT);
}
/* rte: return from exception.  Privileged.  The actual frame pop is
   done in the EXCP_RTE exception handler, not inline here.  */
DISAS_INSN(rte)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    gen_exception(s, s->pc - 2, EXCP_RTE);
}
/* movec: move a general register to a control register.  Privileged.
   The extension word selects the source register (bit 15: A vs D,
   bits 12-14: register number) and the control register (bits 0-11).
   Ends the TB since control registers can affect translation.  */
DISAS_INSN(movec)
{
    uint16_t ext;
    TCGv reg;

    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }

    ext = cpu_lduw_code(env, s->pc);
    s->pc += 2;

    if (ext & 0x8000) {
        reg = AREG(ext, 12);
    } else {
        reg = DREG(ext, 12);
    }
    gen_helper_movec(cpu_env, tcg_const_i32(ext & 0xfff), reg);
    gen_lookup_tb(s);
}
/* intouch: instruction-cache touch.  Privileged; the fetch itself has
   no architectural effect in this emulation.  */
DISAS_INSN(intouch)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* ICache fetch.  Implement as no-op.  */
}
/* cpushl: cache push/invalidate line.  Privileged; no caches are
   modelled, so this is a no-op beyond the privilege check.  */
DISAS_INSN(cpushl)
{
    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* Cache push/invalidate.  Implement as no-op.  */
}
/* wddata: write to debug data port.  Always raises a privilege
   violation (unconditionally, unlike the other privileged insns).  */
DISAS_INSN(wddata)
{
    gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
}
/* wdebug: write to debug control register.  Privileged; aborts QEMU
   if actually reached in supervisor mode, since it is unimplemented.  */
DISAS_INSN(wdebug)
{
    M68kCPU *cpu = m68k_env_get_cpu(env);

    if (IS_USER(s)) {
        gen_exception(s, s->pc - 2, EXCP_PRIVILEGE);
        return;
    }
    /* TODO: Implement wdebug.  */
    cpu_abort(CPU(cpu), "WDEBUG not implemented");
}
/* trap #n: raise exception EXCP_TRAP0 + n (n in the low 4 insn bits).  */
DISAS_INSN(trap)
{
    gen_exception(s, s->pc - 2, EXCP_TRAP0 + (insn & 0xf));
}
/* ??? FP exceptions are not implemented.  Most exceptions are deferred until
   immediately before the next FP instruction is executed.  */
/*
 * fpu: decode the F-line coprocessor (FPU) instruction group.
 * The extension word selects between register-to-register ops,
 * memory-to-register ops (fmove in), fmove out, control-register
 * moves, and fmovem.  All FP values are modelled as 64-bit doubles.
 */
DISAS_INSN(fpu)
{
    uint16_t ext;
    int32_t offset;
    int opmode;
    TCGv_i64 src;
    TCGv_i64 dest;
    TCGv_i64 res;
    TCGv tmp32;
    int round;
    int set_dest;
    int opsize;

    ext = cpu_lduw_code(env, s->pc);
    s->pc += 2;
    opmode = ext & 0x7f;
    switch ((ext >> 13) & 7) {
    case 0: case 2:
        /* Register/EA source arithmetic; handled after this switch.  */
        break;
    case 1:
        goto undef;
    case 3: /* fmove out */
        src = FREG(ext, 7);
        tmp32 = tcg_temp_new_i32();
        /* fmove */
        /* ??? TODO: Proper behavior on overflow.  */
        switch ((ext >> 10) & 7) {
        case 0:
            opsize = OS_LONG;
            gen_helper_f64_to_i32(tmp32, cpu_env, src);
            break;
        case 1:
            opsize = OS_SINGLE;
            gen_helper_f64_to_f32(tmp32, cpu_env, src);
            break;
        case 4:
            opsize = OS_WORD;
            gen_helper_f64_to_i32(tmp32, cpu_env, src);
            break;
        case 5: /* OS_DOUBLE */
            /* 64-bit store: compute the address by hand rather than via
               DEST_EA, handling predecrement/postincrement/displacement
               modes explicitly.  */
            tcg_gen_mov_i32(tmp32, AREG(insn, 0));
            switch ((insn >> 3) & 7) {
            case 2:
            case 3:
                break;
            case 4:
                tcg_gen_addi_i32(tmp32, tmp32, -8);
                break;
            case 5:
                offset = cpu_ldsw_code(env, s->pc);
                s->pc += 2;
                tcg_gen_addi_i32(tmp32, tmp32, offset);
                break;
            default:
                goto undef;
            }
            gen_store64(s, tmp32, src);
            /* Address register writeback for (An)+ and -(An) modes.  */
            switch ((insn >> 3) & 7) {
            case 3:
                tcg_gen_addi_i32(tmp32, tmp32, 8);
                tcg_gen_mov_i32(AREG(insn, 0), tmp32);
                break;
            case 4:
                tcg_gen_mov_i32(AREG(insn, 0), tmp32);
                break;
            }
            tcg_temp_free_i32(tmp32);
            return;
        case 6:
            opsize = OS_BYTE;
            gen_helper_f64_to_i32(tmp32, cpu_env, src);
            break;
        default:
            goto undef;
        }
        DEST_EA(env, insn, opsize, tmp32, NULL);
        tcg_temp_free_i32(tmp32);
        return;
    case 4: /* fmove to control register.  */
        switch ((ext >> 10) & 7) {
        case 4: /* FPCR */
            /* Not implemented.  Ignore writes.  */
            break;
        case 1: /* FPIAR */
        case 2: /* FPSR */
        default:
            /* NOTE(review): cpu_abort() is passed a NULL CPUState here,
               unlike the CPU(cpu) used elsewhere in this file — confirm
               this is safe for the cpu_abort implementation in use.  */
            cpu_abort(NULL, "Unimplemented: fmove to control %d",
                      (ext >> 10) & 7);
        }
        break;
    case 5: /* fmove from control register.  */
        switch ((ext >> 10) & 7) {
        case 4: /* FPCR */
            /* Not implemented.  Always return zero.  */
            tmp32 = tcg_const_i32(0);
            break;
        case 1: /* FPIAR */
        case 2: /* FPSR */
        default:
            /* NOTE(review): NULL CPUState passed to cpu_abort, as above.  */
            cpu_abort(NULL, "Unimplemented: fmove from control %d",
                      (ext >> 10) & 7);
            goto undef;
        }
        DEST_EA(env, insn, OS_LONG, tmp32, NULL);
        break;
    case 6: /* fmovem */
    case 7:
        {
            TCGv addr;
            uint16_t mask;
            int i;
            /* Only the static, postincrement-order register list form is
               supported; an empty register list is rejected.  */
            if ((ext & 0x1f00) != 0x1000 || (ext & 0xff) == 0)
                goto undef;
            tmp32 = gen_lea(env, s, insn, OS_LONG);
            if (IS_NULL_QREG(tmp32)) {
                gen_addr_fault(s);
                return;
            }
            addr = tcg_temp_new_i32();
            tcg_gen_mov_i32(addr, tmp32);
            mask = 0x80;
            for (i = 0; i < 8; i++) {
                if (ext & mask) {
                    dest = FREG(i, 0);
                    if (ext & (1 << 13)) {
                        /* store */
                        tcg_gen_qemu_stf64(dest, addr, IS_USER(s));
                    } else {
                        /* load */
                        tcg_gen_qemu_ldf64(dest, addr, IS_USER(s));
                    }
                    /* Advance only if more registers remain in the list.  */
                    if (ext & (mask - 1))
                        tcg_gen_addi_i32(addr, addr, 8);
                }
                mask >>= 1;
            }
            tcg_temp_free_i32(addr);
        }
        return;
    }
    if (ext & (1 << 14)) {
        /* Source effective address.  */
        switch ((ext >> 10) & 7) {
        case 0: opsize = OS_LONG; break;
        case 1: opsize = OS_SINGLE; break;
        case 4: opsize = OS_WORD; break;
        case 5: opsize = OS_DOUBLE; break;
        case 6: opsize = OS_BYTE; break;
        default:
            goto undef;
        }
        if (opsize == OS_DOUBLE) {
            /* 64-bit load: compute the address by hand, as for the
               fmove-out double case above.  */
            tmp32 = tcg_temp_new_i32();
            tcg_gen_mov_i32(tmp32, AREG(insn, 0));
            switch ((insn >> 3) & 7) {
            case 2:
            case 3:
                break;
            case 4:
                tcg_gen_addi_i32(tmp32, tmp32, -8);
                break;
            case 5:
                offset = cpu_ldsw_code(env, s->pc);
                s->pc += 2;
                tcg_gen_addi_i32(tmp32, tmp32, offset);
                break;
            case 7:
                /* NOTE(review): mode 7 adds a PC-relative displacement on
                   top of AREG(insn, 0) copied above rather than replacing
                   it — verify this matches the intended pc-relative
                   addressing.  */
                offset = cpu_ldsw_code(env, s->pc);
                offset += s->pc - 2;
                s->pc += 2;
                tcg_gen_addi_i32(tmp32, tmp32, offset);
                break;
            default:
                goto undef;
            }
            src = gen_load64(s, tmp32);
            switch ((insn >> 3) & 7) {
            case 3:
                tcg_gen_addi_i32(tmp32, tmp32, 8);
                tcg_gen_mov_i32(AREG(insn, 0), tmp32);
                break;
            case 4:
                tcg_gen_mov_i32(AREG(insn, 0), tmp32);
                break;
            }
            tcg_temp_free_i32(tmp32);
        } else {
            SRC_EA(env, tmp32, opsize, 1, NULL);
            src = tcg_temp_new_i64();
            switch (opsize) {
            case OS_LONG:
            case OS_WORD:
            case OS_BYTE:
                gen_helper_i32_to_f64(src, cpu_env, tmp32);
                break;
            case OS_SINGLE:
                gen_helper_f32_to_f64(src, cpu_env, tmp32);
                break;
            }
        }
    } else {
        /* Source register.  */
        src = FREG(ext, 10);
    }
    dest = FREG(ext, 7);
    res = tcg_temp_new_i64();
    /* For everything except ftst the destination participates in the
       operation (e.g. fadd reads it), so preload it into res.  */
    if (opmode != 0x3a)
        tcg_gen_mov_f64(res, dest);
    round = 1;
    set_dest = 1;
    switch (opmode) {
    case 0: case 0x40: case 0x44: /* fmove */
        tcg_gen_mov_f64(res, src);
        break;
    case 1: /* fint */
        gen_helper_iround_f64(res, cpu_env, src);
        round = 0;
        break;
    case 3: /* fintrz */
        gen_helper_itrunc_f64(res, cpu_env, src);
        round = 0;
        break;
    case 4: case 0x41: case 0x45: /* fsqrt */
        gen_helper_sqrt_f64(res, cpu_env, src);
        break;
    case 0x18: case 0x58: case 0x5c: /* fabs */
        gen_helper_abs_f64(res, src);
        break;
    case 0x1a: case 0x5a: case 0x5e: /* fneg */
        gen_helper_chs_f64(res, src);
        break;
    case 0x20: case 0x60: case 0x64: /* fdiv */
        gen_helper_div_f64(res, cpu_env, res, src);
        break;
    case 0x22: case 0x62: case 0x66: /* fadd */
        gen_helper_add_f64(res, cpu_env, res, src);
        break;
    case 0x23: case 0x63: case 0x67: /* fmul */
        gen_helper_mul_f64(res, cpu_env, res, src);
        break;
    case 0x28: case 0x68: case 0x6c: /* fsub */
        gen_helper_sub_f64(res, cpu_env, res, src);
        break;
    case 0x38: /* fcmp */
        gen_helper_sub_cmp_f64(res, cpu_env, res, src);
        set_dest = 0;
        round = 0;
        break;
    case 0x3a: /* ftst */
        tcg_gen_mov_f64(res, src);
        set_dest = 0;
        round = 0;
        break;
    default:
        goto undef;
    }
    if (ext & (1 << 14)) {
        /* src was allocated for the EA form; register sources are the
           canonical FREG temporaries and must not be freed.  */
        tcg_temp_free_i64(src);
    }
    if (round) {
        /* Rounding to single precision applies when the opmode requests
           it explicitly (0x40 set without 0x4) or when FPCR precision
           control selects single.  */
        if (opmode & 0x40) {
            if ((opmode & 0x4) != 0)
                round = 0;
        } else if ((s->fpcr & M68K_FPCR_PREC) == 0) {
            round = 0;
        }
    }
    if (round) {
        /* Round the result to single precision via a f64->f32->f64
           round trip.  */
        TCGv tmp = tcg_temp_new_i32();
        gen_helper_f64_to_f32(tmp, cpu_env, res);
        gen_helper_f32_to_f64(res, cpu_env, tmp);
        tcg_temp_free_i32(tmp);
    }
    tcg_gen_mov_f64(QREG_FP_RESULT, res);
    if (set_dest) {
        tcg_gen_mov_f64(dest, res);
    }
    tcg_temp_free_i64(res);
    return;
undef:
    /* FIXME: Is this right for offset addressing modes?  */
    s->pc -= 2;
    disas_undef_fpu(env, s, insn);
}
/*
 * fbcc: FP conditional branch.  The helper compare_f64 classifies
 * QREG_FP_RESULT into -1 (negative), 0 (zero), 1 (positive) or
 * 2 (unordered/NaN); each branch condition tests that classification.
 * 16-bit displacement, or 32-bit when insn bit 6 is set.
 */
DISAS_INSN(fbcc)
{
    uint32_t offset;
    uint32_t addr;
    TCGv flag;
    TCGLabel *l1;

    addr = s->pc; /* branch base: address of the displacement word */
    offset = cpu_ldsw_code(env, s->pc);
    s->pc += 2;
    if (insn & (1 << 6)) {
        /* 32-bit displacement: high word already read, append low word.  */
        offset = (offset << 16) | cpu_lduw_code(env, s->pc);
        s->pc += 2;
    }

    l1 = gen_new_label();
    /* TODO: Raise BSUN exception.  */
    flag = tcg_temp_new();
    gen_helper_compare_f64(flag, cpu_env, QREG_FP_RESULT);
    /* Jump to l1 if condition is true.  */
    switch (insn & 0xf) {
    case 0: /* f */
        break;
    case 1: /* eq (=0) */
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(0), l1);
        break;
    case 2: /* ogt (=1) */
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(1), l1);
        break;
    case 3: /* oge (=0 or =1) */
        tcg_gen_brcond_i32(TCG_COND_LEU, flag, tcg_const_i32(1), l1);
        break;
    case 4: /* olt (=-1) */
        tcg_gen_brcond_i32(TCG_COND_LT, flag, tcg_const_i32(0), l1);
        break;
    case 5: /* ole (=-1 or =0) */
        tcg_gen_brcond_i32(TCG_COND_LE, flag, tcg_const_i32(0), l1);
        break;
    case 6: /* ogl (=-1 or =1) */
        /* -1 and 1 both have bit 0 set; 0 and 2 do not.  */
        tcg_gen_andi_i32(flag, flag, 1);
        tcg_gen_brcond_i32(TCG_COND_NE, flag, tcg_const_i32(0), l1);
        break;
    case 7: /* or (=2) */
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(2), l1);
        break;
    case 8: /* un (<2) */
        tcg_gen_brcond_i32(TCG_COND_LT, flag, tcg_const_i32(2), l1);
        break;
    case 9: /* ueq (=0 or =2) */
        tcg_gen_andi_i32(flag, flag, 1);
        tcg_gen_brcond_i32(TCG_COND_EQ, flag, tcg_const_i32(0), l1);
        break;
    case 10: /* ugt (>0) */
        tcg_gen_brcond_i32(TCG_COND_GT, flag, tcg_const_i32(0), l1);
        break;
    case 11: /* uge (>=0) */
        tcg_gen_brcond_i32(TCG_COND_GE, flag, tcg_const_i32(0), l1);
        break;
    case 12: /* ult (=-1 or =2) */
        /* Unsigned compare maps -1 to 0xffffffff, so -1 and 2 are >= 2.  */
        tcg_gen_brcond_i32(TCG_COND_GEU, flag, tcg_const_i32(2), l1);
        break;
    case 13: /* ule (!=1) */
        tcg_gen_brcond_i32(TCG_COND_NE, flag, tcg_const_i32(1), l1);
        break;
    case 14: /* ne (!=0) */
        tcg_gen_brcond_i32(TCG_COND_NE, flag, tcg_const_i32(0), l1);
        break;
    case 15: /* t */
        tcg_gen_br(l1);
        break;
    }
    /* Fall-through: not taken; l1: taken.  */
    gen_jmp_tb(s, 0, s->pc);
    gen_set_label(l1);
    gen_jmp_tb(s, 1, addr + offset);
}
/* frestore: restore FPU state frame.  Unimplemented; aborts if reached.  */
DISAS_INSN(frestore)
{
    M68kCPU *cpu = m68k_env_get_cpu(env);

    /* TODO: Implement frestore.  */
    cpu_abort(CPU(cpu), "FRESTORE not implemented");
}
/* fsave: save FPU state frame.  Unimplemented; aborts if reached.  */
DISAS_INSN(fsave)
{
    M68kCPU *cpu = m68k_env_get_cpu(env);

    /* TODO: Implement fsave.  */
    cpu_abort(CPU(cpu), "FSAVE not implemented");
}
  2265. static inline TCGv gen_mac_extract_word(DisasContext *s, TCGv val, int upper)
  2266. {
  2267. TCGv tmp = tcg_temp_new();
  2268. if (s->env->macsr & MACSR_FI) {
  2269. if (upper)
  2270. tcg_gen_andi_i32(tmp, val, 0xffff0000);
  2271. else
  2272. tcg_gen_shli_i32(tmp, val, 16);
  2273. } else if (s->env->macsr & MACSR_SU) {
  2274. if (upper)
  2275. tcg_gen_sari_i32(tmp, val, 16);
  2276. else
  2277. tcg_gen_ext16s_i32(tmp, val);
  2278. } else {
  2279. if (upper)
  2280. tcg_gen_shri_i32(tmp, val, 16);
  2281. else
  2282. tcg_gen_ext16u_i32(tmp, val);
  2283. }
  2284. return tmp;
  2285. }
/* Clear the sticky MAC result flags (overflow, zero, negative,
   extension-overflow) in MACSR before a new MAC operation.  */
static void gen_mac_clear_flags(void)
{
    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR,
                     ~(MACSR_V | MACSR_Z | MACSR_N | MACSR_EV));
}
/*
 * mac: multiply-accumulate (ColdFire MAC/EMAC).  Handles word and long
 * operand sizes, optional parallel memory load with register writeback,
 * and the EMAC_B dual-accumulate form.  Accumulation mode (fractional /
 * signed / unsigned saturating) is taken from MACSR at translation time,
 * so a MACSR change forces retranslation elsewhere.
 */
DISAS_INSN(mac)
{
    TCGv rx;
    TCGv ry;
    uint16_t ext;
    int acc;
    TCGv tmp;
    TCGv addr;
    TCGv loadval;
    int dual;
    TCGv saved_flags;

    /* Lazily allocate the 64-bit multiply scratch shared by all MAC
       insns in this TB.  */
    if (!s->done_mac) {
        s->mactmp = tcg_temp_new_i64();
        s->done_mac = 1;
    }

    ext = cpu_lduw_code(env, s->pc);
    s->pc += 2;

    /* Accumulator index: insn bit 7 is bit 0, ext bit 4 is bit 1.  */
    acc = ((insn >> 7) & 1) | ((ext >> 3) & 2);
    dual = ((insn & 0x30) != 0 && (ext & 3) != 0);
    if (dual && !m68k_feature(s->env, M68K_FEATURE_CF_EMAC_B)) {
        disas_undef(env, s, insn);
        return;
    }
    if (insn & 0x30) {
        /* MAC with load.  */
        tmp = gen_lea(env, s, insn, OS_LONG);
        addr = tcg_temp_new();
        tcg_gen_and_i32(addr, tmp, QREG_MAC_MASK);
        /* Load the value now to ensure correct exception behavior.
           Perform writeback after reading the MAC inputs.  */
        loadval = gen_load(s, OS_LONG, addr, 0);

        acc ^= 1;
        rx = (ext & 0x8000) ? AREG(ext, 12) : DREG(insn, 12);
        ry = (ext & 8) ? AREG(ext, 0) : DREG(ext, 0);
    } else {
        loadval = addr = NULL_QREG;
        rx = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
        ry = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    }

    gen_mac_clear_flags();
#if 0
    l1 = -1;
    /* Disabled because conditional branches clobber temporary vars.  */
    if ((s->env->macsr & MACSR_OMC) != 0 && !dual) {
        /* Skip the multiply if we know we will ignore it.  */
        l1 = gen_new_label();
        tmp = tcg_temp_new();
        tcg_gen_andi_i32(tmp, QREG_MACSR, 1 << (acc + 8));
        gen_op_jmp_nz32(tmp, l1);
    }
#endif

    if ((ext & 0x0800) == 0) {
        /* Word.  */
        rx = gen_mac_extract_word(s, rx, (ext & 0x80) != 0);
        ry = gen_mac_extract_word(s, ry, (ext & 0x40) != 0);
    }

    if (s->env->macsr & MACSR_FI) {
        gen_helper_macmulf(s->mactmp, cpu_env, rx, ry);
    } else {
        if (s->env->macsr & MACSR_SU)
            gen_helper_macmuls(s->mactmp, cpu_env, rx, ry);
        else
            gen_helper_macmulu(s->mactmp, cpu_env, rx, ry);
        /* Scale factor from ext bits 9-10: <<1 or >>1.  */
        switch ((ext >> 9) & 3) {
        case 1:
            tcg_gen_shli_i64(s->mactmp, s->mactmp, 1);
            break;
        case 3:
            tcg_gen_shri_i64(s->mactmp, s->mactmp, 1);
            break;
        }
    }

    if (dual) {
        /* Save the overflow flag from the multiply.  */
        saved_flags = tcg_temp_new();
        tcg_gen_mov_i32(saved_flags, QREG_MACSR);
    } else {
        saved_flags = NULL_QREG;
    }

#if 0
    /* Disabled because conditional branches clobber temporary vars.  */
    if ((s->env->macsr & MACSR_OMC) != 0 && dual) {
        /* Skip the accumulate if the value is already saturated.  */
        l1 = gen_new_label();
        tmp = tcg_temp_new();
        gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
        gen_op_jmp_nz32(tmp, l1);
    }
#endif

    /* First accumulate: insn bit 8 selects subtract vs add.  */
    if (insn & 0x100)
        tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
    else
        tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);

    /* Saturate according to the current operating mode.  */
    if (s->env->macsr & MACSR_FI)
        gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
    else if (s->env->macsr & MACSR_SU)
        gen_helper_macsats(cpu_env, tcg_const_i32(acc));
    else
        gen_helper_macsatu(cpu_env, tcg_const_i32(acc));

#if 0
    /* Disabled because conditional branches clobber temporary vars.  */
    if (l1 != -1)
        gen_set_label(l1);
#endif

    if (dual) {
        /* Dual accumulate variant.  */
        acc = (ext >> 2) & 3;
        /* Restore the overflow flag from the multiplier.  */
        tcg_gen_mov_i32(QREG_MACSR, saved_flags);
#if 0
        /* Disabled because conditional branches clobber temporary vars.  */
        if ((s->env->macsr & MACSR_OMC) != 0) {
            /* Skip the accumulate if the value is already saturated.  */
            l1 = gen_new_label();
            tmp = tcg_temp_new();
            gen_op_and32(tmp, QREG_MACSR, tcg_const_i32(MACSR_PAV0 << acc));
            gen_op_jmp_nz32(tmp, l1);
        }
#endif
        /* Second accumulate: ext bit 1 selects subtract vs add.  */
        if (ext & 2)
            tcg_gen_sub_i64(MACREG(acc), MACREG(acc), s->mactmp);
        else
            tcg_gen_add_i64(MACREG(acc), MACREG(acc), s->mactmp);
        if (s->env->macsr & MACSR_FI)
            gen_helper_macsatf(cpu_env, tcg_const_i32(acc));
        else if (s->env->macsr & MACSR_SU)
            gen_helper_macsats(cpu_env, tcg_const_i32(acc));
        else
            gen_helper_macsatu(cpu_env, tcg_const_i32(acc));
#if 0
        /* Disabled because conditional branches clobber temporary vars.  */
        if (l1 != -1)
            gen_set_label(l1);
#endif
    }
    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(acc));

    if (insn & 0x30) {
        /* Deferred writeback of the parallel load.  */
        TCGv rw;
        rw = (insn & 0x40) ? AREG(insn, 9) : DREG(insn, 9);
        tcg_gen_mov_i32(rw, loadval);
        /* FIXME: Should address writeback happen with the masked or
           unmasked value?  */
        switch ((insn >> 3) & 7) {
        case 3: /* Post-increment.  */
            tcg_gen_addi_i32(AREG(insn, 0), addr, 4);
            break;
        case 4: /* Pre-decrement.  */
            tcg_gen_mov_i32(AREG(insn, 0), addr);
        }
    }
}
/* from_mac: read accumulator ACCx into Rx, converted according to the
   current MACSR mode (fractional / raw / saturated signed / saturated
   unsigned).  When insn bit 6 is set the accumulator and its pending
   overflow flag are also cleared.  */
DISAS_INSN(from_mac)
{
    TCGv rx;
    TCGv_i64 acc;
    int accnum;

    rx = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    accnum = (insn >> 9) & 3;
    acc = MACREG(accnum);
    if (s->env->macsr & MACSR_FI) {
        gen_helper_get_macf(rx, cpu_env, acc);
    } else if ((s->env->macsr & MACSR_OMC) == 0) {
        /* Non-saturating mode: just take the low 32 bits.  */
        tcg_gen_extrl_i64_i32(rx, acc);
    } else if (s->env->macsr & MACSR_SU) {
        gen_helper_get_macs(rx, acc);
    } else {
        gen_helper_get_macu(rx, acc);
    }
    if (insn & 0x40) {
        tcg_gen_movi_i64(acc, 0);
        tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
    }
}
/* move_mac: copy one accumulator to another (insn bits 0-1 = source,
   bits 9-10 = destination), then recompute the MAC flags.  */
DISAS_INSN(move_mac)
{
    /* FIXME: This can be done without a helper.  */
    int src;
    TCGv dest;

    src = insn & 3;
    dest = tcg_const_i32((insn >> 9) & 3);
    gen_helper_mac_move(cpu_env, dest, tcg_const_i32(src));
    gen_mac_clear_flags();
    gen_helper_mac_set_flags(cpu_env, dest);
}
  2475. DISAS_INSN(from_macsr)
  2476. {
  2477. TCGv reg;
  2478. reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
  2479. tcg_gen_mov_i32(reg, QREG_MACSR);
  2480. }
  2481. DISAS_INSN(from_mask)
  2482. {
  2483. TCGv reg;
  2484. reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
  2485. tcg_gen_mov_i32(reg, QREG_MAC_MASK);
  2486. }
/* from_mext: read the accumulator extension word pair (ACC0/ACC1 or
   ACC2/ACC3, selected by insn bit 10) into Rx, using the fractional or
   integer format per MACSR.  */
DISAS_INSN(from_mext)
{
    TCGv reg;
    TCGv acc;

    reg = (insn & 8) ? AREG(insn, 0) : DREG(insn, 0);
    acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
    if (s->env->macsr & MACSR_FI)
        gen_helper_get_mac_extf(reg, cpu_env, acc);
    else
        gen_helper_get_mac_exti(reg, cpu_env, acc);
}
/* macsr_to_ccr: copy the low 4 MACSR flag bits into the CCR (NZVC)
   and clear X.  Condition codes become immediately valid.  */
DISAS_INSN(macsr_to_ccr)
{
    tcg_gen_movi_i32(QREG_CC_X, 0);
    tcg_gen_andi_i32(QREG_CC_DEST, QREG_MACSR, 0xf);
    s->cc_op = CC_OP_FLAGS;
}
/* to_mac: load accumulator ACCx from an effective address.  The value
   is widened according to MACSR mode: fractional shifts into position,
   signed sign-extends, unsigned zero-extends.  Clears the pending
   overflow flag for that accumulator and recomputes the MAC flags.  */
DISAS_INSN(to_mac)
{
    TCGv_i64 acc;
    TCGv val;
    int accnum;

    accnum = (insn >> 9) & 3;
    acc = MACREG(accnum);
    SRC_EA(env, val, OS_LONG, 0, NULL);
    if (s->env->macsr & MACSR_FI) {
        tcg_gen_ext_i32_i64(acc, val);
        tcg_gen_shli_i64(acc, acc, 8);
    } else if (s->env->macsr & MACSR_SU) {
        tcg_gen_ext_i32_i64(acc, val);
    } else {
        tcg_gen_extu_i32_i64(acc, val);
    }
    tcg_gen_andi_i32(QREG_MACSR, QREG_MACSR, ~(MACSR_PAV0 << accnum));
    gen_mac_clear_flags();
    gen_helper_mac_set_flags(cpu_env, tcg_const_i32(accnum));
}
/* to_macsr: load the MAC status register from an effective address.
   Ends the TB (gen_lookup_tb) because the MACSR operating mode is baked
   into translated MAC code.  */
DISAS_INSN(to_macsr)
{
    TCGv val;

    SRC_EA(env, val, OS_LONG, 0, NULL);
    gen_helper_set_macsr(cpu_env, val);
    gen_lookup_tb(s);
}
/* to_mask: load the MAC address mask register; the upper 16 bits are
   forced to ones so only the low half of the mask is programmable.  */
DISAS_INSN(to_mask)
{
    TCGv val;

    SRC_EA(env, val, OS_LONG, 0, NULL);
    tcg_gen_ori_i32(QREG_MAC_MASK, val, 0xffff0000);
}
/* to_mext: write the accumulator extension word pair (ACC0/ACC1 or
   ACC2/ACC3, selected by insn bit 10) from an effective address, using
   the fractional / signed / unsigned format per MACSR.  */
DISAS_INSN(to_mext)
{
    TCGv val;
    TCGv acc;

    SRC_EA(env, val, OS_LONG, 0, NULL);
    acc = tcg_const_i32((insn & 0x400) ? 2 : 0);
    if (s->env->macsr & MACSR_FI)
        gen_helper_set_mac_extf(cpu_env, val, acc);
    else if (s->env->macsr & MACSR_SU)
        gen_helper_set_mac_exts(cpu_env, val, acc);
    else
        gen_helper_set_mac_extu(cpu_env, val, acc);
}
/* Dispatch table: one handler per possible 16-bit opcode word.  */
static disas_proc opcode_table[65536];

/*
 * Register PROC for every opcode word matching OPCODE under MASK
 * (set mask bits must match OPCODE; clear bits are don't-care).
 * Rather than scanning all 64K entries, only the contiguous range
 * below the most-significant clear mask bit is enumerated; entries
 * inside it that still fail the mask test are skipped individually.
 */
static void
register_opcode (disas_proc proc, uint16_t opcode, uint16_t mask)
{
    int i;
    int from;
    int to;

    /* Sanity check.  All set bits must be included in the mask.  */
    if (opcode & ~mask) {
        fprintf(stderr,
                "qemu internal error: bogus opcode definition %04x/%04x\n",
                opcode, mask);
        abort();
    }
    /* This could probably be cleverer.  For now just optimize the case where
       the top bits are known.  */
    /* Find the first zero bit in the mask.  */
    i = 0x8000;
    while ((i & mask) != 0)
        i >>= 1;
    /* Iterate over all combinations of this and lower bits.  */
    if (i == 0)
        i = 1;
    else
        i <<= 1;
    from = opcode & ~(i - 1);
    to = from + i;
    for (i = from; i < to; i++) {
        if ((i & mask) == opcode)
            opcode_table[i] = proc;
    }
}
/* Register m68k opcode handlers.  Order is important.
   Later insn override earlier ones.  Each entry registers a handler
   only when the CPU advertises the corresponding feature, so the same
   table serves every ColdFire/m68k variant.  */
void register_m68k_insns (CPUM68KState *env)
{
#define INSN(name, opcode, mask, feature) do { \
    if (m68k_feature(env, M68K_FEATURE_##feature)) \
        register_opcode(disas_##name, 0x##opcode, 0x##mask); \
    } while(0)
    INSN(undef,     0000, 0000, CF_ISA_A);
    INSN(arith_im,  0080, fff8, CF_ISA_A);
    INSN(bitrev,    00c0, fff8, CF_ISA_APLUSC);
    INSN(bitop_reg, 0100, f1c0, CF_ISA_A);
    INSN(bitop_reg, 0140, f1c0, CF_ISA_A);
    INSN(bitop_reg, 0180, f1c0, CF_ISA_A);
    INSN(bitop_reg, 01c0, f1c0, CF_ISA_A);
    INSN(arith_im,  0280, fff8, CF_ISA_A);
    INSN(byterev,   02c0, fff8, CF_ISA_APLUSC);
    INSN(arith_im,  0480, fff8, CF_ISA_A);
    INSN(ff1,       04c0, fff8, CF_ISA_APLUSC);
    INSN(arith_im,  0680, fff8, CF_ISA_A);
    INSN(bitop_im,  0800, ffc0, CF_ISA_A);
    INSN(bitop_im,  0840, ffc0, CF_ISA_A);
    INSN(bitop_im,  0880, ffc0, CF_ISA_A);
    INSN(bitop_im,  08c0, ffc0, CF_ISA_A);
    INSN(arith_im,  0a80, fff8, CF_ISA_A);
    INSN(arith_im,  0c00, ff38, CF_ISA_A);
    INSN(move,      1000, f000, CF_ISA_A);
    INSN(move,      2000, f000, CF_ISA_A);
    INSN(move,      3000, f000, CF_ISA_A);
    INSN(strldsr,   40e7, ffff, CF_ISA_APLUSC);
    INSN(negx,      4080, fff8, CF_ISA_A);
    INSN(move_from_sr, 40c0, fff8, CF_ISA_A);
    INSN(lea,       41c0, f1c0, CF_ISA_A);
    INSN(clr,       4200, ff00, CF_ISA_A);
    /* 42c0 is first disabled wholesale, then the dreg form re-enabled.  */
    INSN(undef,     42c0, ffc0, CF_ISA_A);
    INSN(move_from_ccr, 42c0, fff8, CF_ISA_A);
    INSN(neg,       4480, fff8, CF_ISA_A);
    INSN(move_to_ccr, 44c0, ffc0, CF_ISA_A);
    INSN(not,       4680, fff8, CF_ISA_A);
    INSN(move_to_sr, 46c0, ffc0, CF_ISA_A);
    INSN(pea,       4840, ffc0, CF_ISA_A);
    INSN(swap,      4840, fff8, CF_ISA_A);
    INSN(movem,     48c0, fbc0, CF_ISA_A);
    INSN(ext,       4880, fff8, CF_ISA_A);
    INSN(ext,       48c0, fff8, CF_ISA_A);
    INSN(ext,       49c0, fff8, CF_ISA_A);
    INSN(tst,       4a00, ff00, CF_ISA_A);
    INSN(tas,       4ac0, ffc0, CF_ISA_B);
    INSN(halt,      4ac8, ffff, CF_ISA_A);
    INSN(pulse,     4acc, ffff, CF_ISA_A);
    INSN(illegal,   4afc, ffff, CF_ISA_A);
    INSN(mull,      4c00, ffc0, CF_ISA_A);
    INSN(divl,      4c40, ffc0, CF_ISA_A);
    INSN(sats,      4c80, fff8, CF_ISA_B);
    INSN(trap,      4e40, fff0, CF_ISA_A);
    INSN(link,      4e50, fff8, CF_ISA_A);
    INSN(unlk,      4e58, fff8, CF_ISA_A);
    INSN(move_to_usp, 4e60, fff8, USP);
    INSN(move_from_usp, 4e68, fff8, USP);
    INSN(nop,       4e71, ffff, CF_ISA_A);
    INSN(stop,      4e72, ffff, CF_ISA_A);
    INSN(rte,       4e73, ffff, CF_ISA_A);
    INSN(rts,       4e75, ffff, CF_ISA_A);
    INSN(movec,     4e7b, ffff, CF_ISA_A);
    INSN(jump,      4e80, ffc0, CF_ISA_A);
    INSN(jump,      4ec0, ffc0, CF_ISA_A);
    INSN(addsubq,   5180, f1c0, CF_ISA_A);
    INSN(scc,       50c0, f0f8, CF_ISA_A);
    INSN(addsubq,   5080, f1c0, CF_ISA_A);
    INSN(tpf,       51f8, fff8, CF_ISA_A);

    /* Branch instructions.  */
    INSN(branch,    6000, f000, CF_ISA_A);
    /* Disable long branch instructions, then add back the ones we want.  */
    INSN(undef,     60ff, f0ff, CF_ISA_A); /* All long branches.  */
    INSN(branch,    60ff, f0ff, CF_ISA_B);
    INSN(undef,     60ff, ffff, CF_ISA_B); /* bra.l */
    INSN(branch,    60ff, ffff, BRAL);

    INSN(moveq,     7000, f100, CF_ISA_A);
    INSN(mvzs,      7100, f100, CF_ISA_B);
    INSN(or,        8000, f000, CF_ISA_A);
    INSN(divw,      80c0, f0c0, CF_ISA_A);
    INSN(addsub,    9000, f000, CF_ISA_A);
    INSN(subx,      9180, f1f8, CF_ISA_A);
    INSN(suba,      91c0, f1c0, CF_ISA_A);

    /* A-line: MAC/EMAC when available, otherwise undefined.  */
    INSN(undef_mac, a000, f000, CF_ISA_A);
    INSN(mac,       a000, f100, CF_EMAC);
    INSN(from_mac,  a180, f9b0, CF_EMAC);
    INSN(move_mac,  a110, f9fc, CF_EMAC);
    INSN(from_macsr,a980, f9f0, CF_EMAC);
    INSN(from_mask, ad80, fff0, CF_EMAC);
    INSN(from_mext, ab80, fbf0, CF_EMAC);
    INSN(macsr_to_ccr, a9c0, ffff, CF_EMAC);
    INSN(to_mac,    a100, f9c0, CF_EMAC);
    INSN(to_macsr,  a900, ffc0, CF_EMAC);
    INSN(to_mext,   ab00, fbc0, CF_EMAC);
    INSN(to_mask,   ad00, ffc0, CF_EMAC);

    INSN(mov3q,     a140, f1c0, CF_ISA_B);
    INSN(cmp,       b000, f1c0, CF_ISA_B); /* cmp.b */
    INSN(cmp,       b040, f1c0, CF_ISA_B); /* cmp.w */
    INSN(cmpa,      b0c0, f1c0, CF_ISA_B); /* cmpa.w */
    INSN(cmp,       b080, f1c0, CF_ISA_A);
    INSN(cmpa,      b1c0, f1c0, CF_ISA_A);
    INSN(eor,       b180, f1c0, CF_ISA_A);
    INSN(and,       c000, f000, CF_ISA_A);
    INSN(mulw,      c0c0, f0c0, CF_ISA_A);
    INSN(addsub,    d000, f000, CF_ISA_A);
    INSN(addx,      d180, f1f8, CF_ISA_A);
    INSN(adda,      d1c0, f1c0, CF_ISA_A);
    INSN(shift_im,  e080, f0f0, CF_ISA_A);
    INSN(shift_reg, e0a0, f0f0, CF_ISA_A);

    /* F-line: FPU ops override the blanket undef; intouch overrides
       frestore/fsave at f340 on non-FPU parts (later entries win).  */
    INSN(undef_fpu, f000, f000, CF_ISA_A);
    INSN(fpu,       f200, ffc0, CF_FPU);
    INSN(fbcc,      f280, ffc0, CF_FPU);
    INSN(frestore,  f340, ffc0, CF_FPU);
    INSN(fsave,     f340, ffc0, CF_FPU);
    INSN(intouch,   f340, ffc0, CF_ISA_A);
    INSN(cpushl,    f428, ff38, CF_ISA_A);
    INSN(wddata,    fb00, ff00, CF_ISA_A);
    INSN(wdebug,    fbc0, ffc0, CF_ISA_A);
#undef INSN
}
/* ??? Some of this implementation is not exception safe.  We should always
   write back the result to memory before setting the condition codes.  */
/* Fetch one opcode word, advance the PC, and dispatch through
   opcode_table (every entry is populated, worst case with disas_undef,
   via register_m68k_insns).  */
static void disas_m68k_insn(CPUM68KState * env, DisasContext *s)
{
    uint16_t insn;

    insn = cpu_lduw_code(env, s->pc);
    s->pc += 2;

    opcode_table[insn](env, s, insn);
}
/* generate intermediate code for basic block 'tb'.  */
/* Translates guest instructions starting at tb->pc until the block
   ends (jump, TB-buffer full, page boundary, insn budget, or
   single-step), then emits the appropriate TB epilogue.  */
void gen_intermediate_code(CPUM68KState *env, TranslationBlock *tb)
{
    M68kCPU *cpu = m68k_env_get_cpu(env);
    CPUState *cs = CPU(cpu);
    DisasContext dc1, *dc = &dc1;
    target_ulong pc_start;
    int pc_offset;
    int num_insns;
    int max_insns;

    /* generate intermediate code */
    pc_start = tb->pc;

    dc->tb = tb;

    dc->env = env;
    dc->is_jmp = DISAS_NEXT;
    dc->pc = pc_start;
    dc->cc_op = CC_OP_DYNAMIC; /* cc state unknown at block entry */
    dc->singlestep_enabled = cs->singlestep_enabled;
    dc->fpcr = env->fpcr;
    dc->user = (env->sr & SR_S) == 0;
    dc->done_mac = 0;
    num_insns = 0;
    max_insns = tb->cflags & CF_COUNT_MASK;
    if (max_insns == 0) {
        max_insns = CF_COUNT_MASK;
    }
    if (max_insns > TCG_MAX_INSNS) {
        max_insns = TCG_MAX_INSNS;
    }

    gen_tb_start(tb);
    do {
        pc_offset = dc->pc - pc_start;
        gen_throws_exception = NULL;
        tcg_gen_insn_start(dc->pc);
        num_insns++;

        if (unlikely(cpu_breakpoint_test(cs, dc->pc, BP_ANY))) {
            gen_exception(dc, dc->pc, EXCP_DEBUG);
            dc->is_jmp = DISAS_JUMP;
            /* The address covered by the breakpoint gets its own TB.  */
            break;
        }

        if (num_insns == max_insns && (tb->cflags & CF_LAST_IO)) {
            gen_io_start();
        }

        dc->insn_pc = dc->pc;
        disas_m68k_insn(env, dc);
    } while (!dc->is_jmp && !tcg_op_buf_full() &&
             !cs->singlestep_enabled &&
             !singlestep &&
             /* Stop well before the page end so a multi-word insn cannot
                straddle the page boundary.  */
             (pc_offset) < (TARGET_PAGE_SIZE - 32) &&
             num_insns < max_insns);

    if (tb->cflags & CF_LAST_IO)
        gen_io_end();
    if (unlikely(cs->singlestep_enabled)) {
        /* Make sure the pc is updated, and raise a debug exception.  */
        if (!dc->is_jmp) {
            gen_flush_cc_op(dc);
            tcg_gen_movi_i32(QREG_PC, dc->pc);
        }
        gen_helper_raise_exception(cpu_env, tcg_const_i32(EXCP_DEBUG));
    } else {
        switch(dc->is_jmp) {
        case DISAS_NEXT:
            gen_flush_cc_op(dc);
            gen_jmp_tb(dc, 0, dc->pc);
            break;
        default:
        case DISAS_JUMP:
        case DISAS_UPDATE:
            gen_flush_cc_op(dc);
            /* indicate that the hash table must be used to find the next TB */
            tcg_gen_exit_tb(0);
            break;
        case DISAS_TB_JUMP:
            /* nothing more to generate */
            break;
        }
    }
    gen_tb_end(tb, num_insns);

#ifdef DEBUG_DISAS
    if (qemu_loglevel_mask(CPU_LOG_TB_IN_ASM)) {
        qemu_log("----------------\n");
        qemu_log("IN: %s\n", lookup_symbol(pc_start));
        log_target_disas(cs, pc_start, dc->pc - pc_start, 0);
        qemu_log("\n");
    }
#endif
    tb->size = dc->pc - pc_start;
    tb->icount = num_insns;
}
/* Dump the CPU register state (D0-D7, A0-A7, F0-F7, PC, SR flags,
   FP result) for the monitor / -d cpu logging.  */
void m68k_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
                         int flags)
{
    M68kCPU *cpu = M68K_CPU(cs);
    CPUM68KState *env = &cpu->env;
    int i;
    uint16_t sr;
    CPU_DoubleU u;

    for (i = 0; i < 8; i++)
    {
        u.d = env->fregs[i];
        cpu_fprintf (f, "D%d = %08x   A%d = %08x   F%d = %08x%08x (%12g)\n",
                     i, env->dregs[i], i, env->aregs[i],
                     i, u.l.upper, u.l.lower, *(double *)&u.d);
    }
    cpu_fprintf (f, "PC = %08x   ", env->pc);
    sr = env->sr;
    cpu_fprintf (f, "SR = %04x %c%c%c%c%c ", sr, (sr & 0x10) ? 'X' : '-',
                 (sr & CCF_N) ? 'N' : '-', (sr & CCF_Z) ? 'Z' : '-',
                 (sr & CCF_V) ? 'V' : '-', (sr & CCF_C) ? 'C' : '-');
    cpu_fprintf (f, "FPRESULT = %12g\n", *(double *)&env->fp_result);
}
/* Restore CPU state from the insn_start data recorded during
   translation; for m68k only the PC is recorded (data[0], written by
   tcg_gen_insn_start in gen_intermediate_code).  */
void restore_state_to_opc(CPUM68KState *env, TranslationBlock *tb,
                          target_ulong *data)
{
    env->pc = data[0];
}