CGBuiltin.cpp 160 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323
  1. //===---- CGBuiltin.cpp - Emit LLVM Code for builtins ---------------------===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This contains code to emit Builtin calls as LLVM code.
  11. //
  12. //===----------------------------------------------------------------------===//
  13. #include "TargetInfo.h"
  14. #include "CodeGenFunction.h"
  15. #include "CodeGenModule.h"
  16. #include "CGObjCRuntime.h"
  17. #include "clang/Basic/TargetInfo.h"
  18. #include "clang/AST/ASTContext.h"
  19. #include "clang/AST/Decl.h"
  20. #include "clang/Basic/TargetBuiltins.h"
  21. #include "llvm/Intrinsics.h"
  22. #include "llvm/Target/TargetData.h"
  23. using namespace clang;
  24. using namespace CodeGen;
  25. using namespace llvm;
  26. /// getBuiltinLibFunction - Given a builtin id for a function like
  27. /// "__builtin_fabsf", return a Function* for "fabsf".
  28. llvm::Value *CodeGenModule::getBuiltinLibFunction(const FunctionDecl *FD,
  29. unsigned BuiltinID) {
  30. assert(Context.BuiltinInfo.isLibFunction(BuiltinID));
  31. // Get the name, skip over the __builtin_ prefix (if necessary).
  32. StringRef Name;
  33. GlobalDecl D(FD);
  34. // If the builtin has been declared explicitly with an assembler label,
  35. // use the mangled name. This differs from the plain label on platforms
  36. // that prefix labels.
  37. if (FD->hasAttr<AsmLabelAttr>())
  38. Name = getMangledName(D);
  39. else
  40. Name = Context.BuiltinInfo.GetName(BuiltinID) + 10;
  41. llvm::FunctionType *Ty =
  42. cast<llvm::FunctionType>(getTypes().ConvertType(FD->getType()));
  43. return GetOrCreateLLVMFunction(Name, Ty, D, /*ForVTable=*/false);
  44. }
  45. /// Emit the conversions required to turn the given value into an
  46. /// integer of the given size.
  47. static Value *EmitToInt(CodeGenFunction &CGF, llvm::Value *V,
  48. QualType T, llvm::IntegerType *IntType) {
  49. V = CGF.EmitToMemory(V, T);
  50. if (V->getType()->isPointerTy())
  51. return CGF.Builder.CreatePtrToInt(V, IntType);
  52. assert(V->getType() == IntType);
  53. return V;
  54. }
  55. static Value *EmitFromInt(CodeGenFunction &CGF, llvm::Value *V,
  56. QualType T, llvm::Type *ResultType) {
  57. V = CGF.EmitFromMemory(V, T);
  58. if (ResultType->isPointerTy())
  59. return CGF.Builder.CreateIntToPtr(V, ResultType);
  60. assert(V->getType() == ResultType);
  61. return V;
  62. }
  63. /// Utility to insert an atomic instruction based on Instrinsic::ID
  64. /// and the expression node.
  65. static RValue EmitBinaryAtomic(CodeGenFunction &CGF,
  66. llvm::AtomicRMWInst::BinOp Kind,
  67. const CallExpr *E) {
  68. QualType T = E->getType();
  69. assert(E->getArg(0)->getType()->isPointerType());
  70. assert(CGF.getContext().hasSameUnqualifiedType(T,
  71. E->getArg(0)->getType()->getPointeeType()));
  72. assert(CGF.getContext().hasSameUnqualifiedType(T, E->getArg(1)->getType()));
  73. llvm::Value *DestPtr = CGF.EmitScalarExpr(E->getArg(0));
  74. unsigned AddrSpace =
  75. cast<llvm::PointerType>(DestPtr->getType())->getAddressSpace();
  76. llvm::IntegerType *IntType =
  77. llvm::IntegerType::get(CGF.getLLVMContext(),
  78. CGF.getContext().getTypeSize(T));
  79. llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);
  80. llvm::Value *Args[2];
  81. Args[0] = CGF.Builder.CreateBitCast(DestPtr, IntPtrType);
  82. Args[1] = CGF.EmitScalarExpr(E->getArg(1));
  83. llvm::Type *ValueType = Args[1]->getType();
  84. Args[1] = EmitToInt(CGF, Args[1], T, IntType);
  85. llvm::Value *Result =
  86. CGF.Builder.CreateAtomicRMW(Kind, Args[0], Args[1],
  87. llvm::SequentiallyConsistent);
  88. Result = EmitFromInt(CGF, Result, T, ValueType);
  89. return RValue::get(Result);
  90. }
  91. /// Utility to insert an atomic instruction based Instrinsic::ID and
  92. /// the expression node, where the return value is the result of the
  93. /// operation.
  94. static RValue EmitBinaryAtomicPost(CodeGenFunction &CGF,
  95. llvm::AtomicRMWInst::BinOp Kind,
  96. const CallExpr *E,
  97. Instruction::BinaryOps Op) {
  98. QualType T = E->getType();
  99. assert(E->getArg(0)->getType()->isPointerType());
  100. assert(CGF.getContext().hasSameUnqualifiedType(T,
  101. E->getArg(0)->getType()->getPointeeType()));
  102. assert(CGF.getContext().hasSameUnqualifiedType(T, E->getArg(1)->getType()));
  103. llvm::Value *DestPtr = CGF.EmitScalarExpr(E->getArg(0));
  104. unsigned AddrSpace =
  105. cast<llvm::PointerType>(DestPtr->getType())->getAddressSpace();
  106. llvm::IntegerType *IntType =
  107. llvm::IntegerType::get(CGF.getLLVMContext(),
  108. CGF.getContext().getTypeSize(T));
  109. llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);
  110. llvm::Value *Args[2];
  111. Args[1] = CGF.EmitScalarExpr(E->getArg(1));
  112. llvm::Type *ValueType = Args[1]->getType();
  113. Args[1] = EmitToInt(CGF, Args[1], T, IntType);
  114. Args[0] = CGF.Builder.CreateBitCast(DestPtr, IntPtrType);
  115. llvm::Value *Result =
  116. CGF.Builder.CreateAtomicRMW(Kind, Args[0], Args[1],
  117. llvm::SequentiallyConsistent);
  118. Result = CGF.Builder.CreateBinOp(Op, Result, Args[1]);
  119. Result = EmitFromInt(CGF, Result, T, ValueType);
  120. return RValue::get(Result);
  121. }
  122. /// EmitFAbs - Emit a call to fabs/fabsf/fabsl, depending on the type of ValTy,
  123. /// which must be a scalar floating point type.
  124. static Value *EmitFAbs(CodeGenFunction &CGF, Value *V, QualType ValTy) {
  125. const BuiltinType *ValTyP = ValTy->getAs<BuiltinType>();
  126. assert(ValTyP && "isn't scalar fp type!");
  127. StringRef FnName;
  128. switch (ValTyP->getKind()) {
  129. default: llvm_unreachable("Isn't a scalar fp type!");
  130. case BuiltinType::Float: FnName = "fabsf"; break;
  131. case BuiltinType::Double: FnName = "fabs"; break;
  132. case BuiltinType::LongDouble: FnName = "fabsl"; break;
  133. }
  134. // The prototype is something that takes and returns whatever V's type is.
  135. llvm::FunctionType *FT = llvm::FunctionType::get(V->getType(), V->getType(),
  136. false);
  137. llvm::Value *Fn = CGF.CGM.CreateRuntimeFunction(FT, FnName);
  138. return CGF.Builder.CreateCall(Fn, V, "abs");
  139. }
  140. static RValue emitLibraryCall(CodeGenFunction &CGF, const FunctionDecl *Fn,
  141. const CallExpr *E, llvm::Value *calleeValue) {
  142. return CGF.EmitCall(E->getCallee()->getType(), calleeValue,
  143. ReturnValueSlot(), E->arg_begin(), E->arg_end(), Fn);
  144. }
  145. RValue CodeGenFunction::EmitBuiltinExpr(const FunctionDecl *FD,
  146. unsigned BuiltinID, const CallExpr *E) {
  147. // See if we can constant fold this builtin. If so, don't emit it at all.
  148. Expr::EvalResult Result;
  149. if (E->EvaluateAsRValue(Result, CGM.getContext()) &&
  150. !Result.hasSideEffects()) {
  151. if (Result.Val.isInt())
  152. return RValue::get(llvm::ConstantInt::get(getLLVMContext(),
  153. Result.Val.getInt()));
  154. if (Result.Val.isFloat())
  155. return RValue::get(llvm::ConstantFP::get(getLLVMContext(),
  156. Result.Val.getFloat()));
  157. }
  158. switch (BuiltinID) {
  159. default: break; // Handle intrinsics and libm functions below.
  160. case Builtin::BI__builtin___CFStringMakeConstantString:
  161. case Builtin::BI__builtin___NSStringMakeConstantString:
  162. return RValue::get(CGM.EmitConstantExpr(E, E->getType(), 0));
  163. case Builtin::BI__builtin_stdarg_start:
  164. case Builtin::BI__builtin_va_start:
  165. case Builtin::BI__builtin_va_end: {
  166. Value *ArgValue = EmitVAListRef(E->getArg(0));
  167. llvm::Type *DestType = Int8PtrTy;
  168. if (ArgValue->getType() != DestType)
  169. ArgValue = Builder.CreateBitCast(ArgValue, DestType,
  170. ArgValue->getName().data());
  171. Intrinsic::ID inst = (BuiltinID == Builtin::BI__builtin_va_end) ?
  172. Intrinsic::vaend : Intrinsic::vastart;
  173. return RValue::get(Builder.CreateCall(CGM.getIntrinsic(inst), ArgValue));
  174. }
  175. case Builtin::BI__builtin_va_copy: {
  176. Value *DstPtr = EmitVAListRef(E->getArg(0));
  177. Value *SrcPtr = EmitVAListRef(E->getArg(1));
  178. llvm::Type *Type = Int8PtrTy;
  179. DstPtr = Builder.CreateBitCast(DstPtr, Type);
  180. SrcPtr = Builder.CreateBitCast(SrcPtr, Type);
  181. return RValue::get(Builder.CreateCall2(CGM.getIntrinsic(Intrinsic::vacopy),
  182. DstPtr, SrcPtr));
  183. }
  184. case Builtin::BI__builtin_abs:
  185. case Builtin::BI__builtin_labs:
  186. case Builtin::BI__builtin_llabs: {
  187. Value *ArgValue = EmitScalarExpr(E->getArg(0));
  188. Value *NegOp = Builder.CreateNeg(ArgValue, "neg");
  189. Value *CmpResult =
  190. Builder.CreateICmpSGE(ArgValue,
  191. llvm::Constant::getNullValue(ArgValue->getType()),
  192. "abscond");
  193. Value *Result =
  194. Builder.CreateSelect(CmpResult, ArgValue, NegOp, "abs");
  195. return RValue::get(Result);
  196. }
  197. case Builtin::BI__builtin_ctzs:
  198. case Builtin::BI__builtin_ctz:
  199. case Builtin::BI__builtin_ctzl:
  200. case Builtin::BI__builtin_ctzll: {
  201. Value *ArgValue = EmitScalarExpr(E->getArg(0));
  202. llvm::Type *ArgType = ArgValue->getType();
  203. Value *F = CGM.getIntrinsic(Intrinsic::cttz, ArgType);
  204. llvm::Type *ResultType = ConvertType(E->getType());
  205. Value *ZeroUndef = Builder.getInt1(Target.isCLZForZeroUndef());
  206. Value *Result = Builder.CreateCall2(F, ArgValue, ZeroUndef);
  207. if (Result->getType() != ResultType)
  208. Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
  209. "cast");
  210. return RValue::get(Result);
  211. }
  212. case Builtin::BI__builtin_clzs:
  213. case Builtin::BI__builtin_clz:
  214. case Builtin::BI__builtin_clzl:
  215. case Builtin::BI__builtin_clzll: {
  216. Value *ArgValue = EmitScalarExpr(E->getArg(0));
  217. llvm::Type *ArgType = ArgValue->getType();
  218. Value *F = CGM.getIntrinsic(Intrinsic::ctlz, ArgType);
  219. llvm::Type *ResultType = ConvertType(E->getType());
  220. Value *ZeroUndef = Builder.getInt1(Target.isCLZForZeroUndef());
  221. Value *Result = Builder.CreateCall2(F, ArgValue, ZeroUndef);
  222. if (Result->getType() != ResultType)
  223. Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
  224. "cast");
  225. return RValue::get(Result);
  226. }
  227. case Builtin::BI__builtin_ffs:
  228. case Builtin::BI__builtin_ffsl:
  229. case Builtin::BI__builtin_ffsll: {
  230. // ffs(x) -> x ? cttz(x) + 1 : 0
  231. Value *ArgValue = EmitScalarExpr(E->getArg(0));
  232. llvm::Type *ArgType = ArgValue->getType();
  233. Value *F = CGM.getIntrinsic(Intrinsic::cttz, ArgType);
  234. llvm::Type *ResultType = ConvertType(E->getType());
  235. Value *Tmp = Builder.CreateAdd(Builder.CreateCall2(F, ArgValue,
  236. Builder.getTrue()),
  237. llvm::ConstantInt::get(ArgType, 1));
  238. Value *Zero = llvm::Constant::getNullValue(ArgType);
  239. Value *IsZero = Builder.CreateICmpEQ(ArgValue, Zero, "iszero");
  240. Value *Result = Builder.CreateSelect(IsZero, Zero, Tmp, "ffs");
  241. if (Result->getType() != ResultType)
  242. Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
  243. "cast");
  244. return RValue::get(Result);
  245. }
  246. case Builtin::BI__builtin_parity:
  247. case Builtin::BI__builtin_parityl:
  248. case Builtin::BI__builtin_parityll: {
  249. // parity(x) -> ctpop(x) & 1
  250. Value *ArgValue = EmitScalarExpr(E->getArg(0));
  251. llvm::Type *ArgType = ArgValue->getType();
  252. Value *F = CGM.getIntrinsic(Intrinsic::ctpop, ArgType);
  253. llvm::Type *ResultType = ConvertType(E->getType());
  254. Value *Tmp = Builder.CreateCall(F, ArgValue);
  255. Value *Result = Builder.CreateAnd(Tmp, llvm::ConstantInt::get(ArgType, 1));
  256. if (Result->getType() != ResultType)
  257. Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
  258. "cast");
  259. return RValue::get(Result);
  260. }
  261. case Builtin::BI__builtin_popcount:
  262. case Builtin::BI__builtin_popcountl:
  263. case Builtin::BI__builtin_popcountll: {
  264. Value *ArgValue = EmitScalarExpr(E->getArg(0));
  265. llvm::Type *ArgType = ArgValue->getType();
  266. Value *F = CGM.getIntrinsic(Intrinsic::ctpop, ArgType);
  267. llvm::Type *ResultType = ConvertType(E->getType());
  268. Value *Result = Builder.CreateCall(F, ArgValue);
  269. if (Result->getType() != ResultType)
  270. Result = Builder.CreateIntCast(Result, ResultType, /*isSigned*/true,
  271. "cast");
  272. return RValue::get(Result);
  273. }
  274. case Builtin::BI__builtin_expect: {
  275. Value *ArgValue = EmitScalarExpr(E->getArg(0));
  276. llvm::Type *ArgType = ArgValue->getType();
  277. Value *FnExpect = CGM.getIntrinsic(Intrinsic::expect, ArgType);
  278. Value *ExpectedValue = EmitScalarExpr(E->getArg(1));
  279. Value *Result = Builder.CreateCall2(FnExpect, ArgValue, ExpectedValue,
  280. "expval");
  281. return RValue::get(Result);
  282. }
  283. case Builtin::BI__builtin_bswap32:
  284. case Builtin::BI__builtin_bswap64: {
  285. Value *ArgValue = EmitScalarExpr(E->getArg(0));
  286. llvm::Type *ArgType = ArgValue->getType();
  287. Value *F = CGM.getIntrinsic(Intrinsic::bswap, ArgType);
  288. return RValue::get(Builder.CreateCall(F, ArgValue));
  289. }
  290. case Builtin::BI__builtin_object_size: {
  291. // We pass this builtin onto the optimizer so that it can
  292. // figure out the object size in more complex cases.
  293. llvm::Type *ResType = ConvertType(E->getType());
  294. // LLVM only supports 0 and 2, make sure that we pass along that
  295. // as a boolean.
  296. Value *Ty = EmitScalarExpr(E->getArg(1));
  297. ConstantInt *CI = dyn_cast<ConstantInt>(Ty);
  298. assert(CI);
  299. uint64_t val = CI->getZExtValue();
  300. CI = ConstantInt::get(Builder.getInt1Ty(), (val & 0x2) >> 1);
  301. Value *F = CGM.getIntrinsic(Intrinsic::objectsize, ResType);
  302. return RValue::get(Builder.CreateCall2(F,
  303. EmitScalarExpr(E->getArg(0)),
  304. CI));
  305. }
  306. case Builtin::BI__builtin_prefetch: {
  307. Value *Locality, *RW, *Address = EmitScalarExpr(E->getArg(0));
  308. // FIXME: Technically these constants should of type 'int', yes?
  309. RW = (E->getNumArgs() > 1) ? EmitScalarExpr(E->getArg(1)) :
  310. llvm::ConstantInt::get(Int32Ty, 0);
  311. Locality = (E->getNumArgs() > 2) ? EmitScalarExpr(E->getArg(2)) :
  312. llvm::ConstantInt::get(Int32Ty, 3);
  313. Value *Data = llvm::ConstantInt::get(Int32Ty, 1);
  314. Value *F = CGM.getIntrinsic(Intrinsic::prefetch);
  315. return RValue::get(Builder.CreateCall4(F, Address, RW, Locality, Data));
  316. }
  317. case Builtin::BI__builtin_trap: {
  318. Value *F = CGM.getIntrinsic(Intrinsic::trap);
  319. return RValue::get(Builder.CreateCall(F));
  320. }
  321. case Builtin::BI__builtin_unreachable: {
  322. if (CatchUndefined)
  323. EmitBranch(getTrapBB());
  324. else
  325. Builder.CreateUnreachable();
  326. // We do need to preserve an insertion point.
  327. EmitBlock(createBasicBlock("unreachable.cont"));
  328. return RValue::get(0);
  329. }
  330. case Builtin::BI__builtin_powi:
  331. case Builtin::BI__builtin_powif:
  332. case Builtin::BI__builtin_powil: {
  333. Value *Base = EmitScalarExpr(E->getArg(0));
  334. Value *Exponent = EmitScalarExpr(E->getArg(1));
  335. llvm::Type *ArgType = Base->getType();
  336. Value *F = CGM.getIntrinsic(Intrinsic::powi, ArgType);
  337. return RValue::get(Builder.CreateCall2(F, Base, Exponent));
  338. }
  339. case Builtin::BI__builtin_isgreater:
  340. case Builtin::BI__builtin_isgreaterequal:
  341. case Builtin::BI__builtin_isless:
  342. case Builtin::BI__builtin_islessequal:
  343. case Builtin::BI__builtin_islessgreater:
  344. case Builtin::BI__builtin_isunordered: {
  345. // Ordered comparisons: we know the arguments to these are matching scalar
  346. // floating point values.
  347. Value *LHS = EmitScalarExpr(E->getArg(0));
  348. Value *RHS = EmitScalarExpr(E->getArg(1));
  349. switch (BuiltinID) {
  350. default: llvm_unreachable("Unknown ordered comparison");
  351. case Builtin::BI__builtin_isgreater:
  352. LHS = Builder.CreateFCmpOGT(LHS, RHS, "cmp");
  353. break;
  354. case Builtin::BI__builtin_isgreaterequal:
  355. LHS = Builder.CreateFCmpOGE(LHS, RHS, "cmp");
  356. break;
  357. case Builtin::BI__builtin_isless:
  358. LHS = Builder.CreateFCmpOLT(LHS, RHS, "cmp");
  359. break;
  360. case Builtin::BI__builtin_islessequal:
  361. LHS = Builder.CreateFCmpOLE(LHS, RHS, "cmp");
  362. break;
  363. case Builtin::BI__builtin_islessgreater:
  364. LHS = Builder.CreateFCmpONE(LHS, RHS, "cmp");
  365. break;
  366. case Builtin::BI__builtin_isunordered:
  367. LHS = Builder.CreateFCmpUNO(LHS, RHS, "cmp");
  368. break;
  369. }
  370. // ZExt bool to int type.
  371. return RValue::get(Builder.CreateZExt(LHS, ConvertType(E->getType())));
  372. }
  373. case Builtin::BI__builtin_isnan: {
  374. Value *V = EmitScalarExpr(E->getArg(0));
  375. V = Builder.CreateFCmpUNO(V, V, "cmp");
  376. return RValue::get(Builder.CreateZExt(V, ConvertType(E->getType())));
  377. }
  378. case Builtin::BI__builtin_isinf: {
  379. // isinf(x) --> fabs(x) == infinity
  380. Value *V = EmitScalarExpr(E->getArg(0));
  381. V = EmitFAbs(*this, V, E->getArg(0)->getType());
  382. V = Builder.CreateFCmpOEQ(V, ConstantFP::getInfinity(V->getType()),"isinf");
  383. return RValue::get(Builder.CreateZExt(V, ConvertType(E->getType())));
  384. }
  385. // TODO: BI__builtin_isinf_sign
  386. // isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0
  387. case Builtin::BI__builtin_isnormal: {
  388. // isnormal(x) --> x == x && fabsf(x) < infinity && fabsf(x) >= float_min
  389. Value *V = EmitScalarExpr(E->getArg(0));
  390. Value *Eq = Builder.CreateFCmpOEQ(V, V, "iseq");
  391. Value *Abs = EmitFAbs(*this, V, E->getArg(0)->getType());
  392. Value *IsLessThanInf =
  393. Builder.CreateFCmpULT(Abs, ConstantFP::getInfinity(V->getType()),"isinf");
  394. APFloat Smallest = APFloat::getSmallestNormalized(
  395. getContext().getFloatTypeSemantics(E->getArg(0)->getType()));
  396. Value *IsNormal =
  397. Builder.CreateFCmpUGE(Abs, ConstantFP::get(V->getContext(), Smallest),
  398. "isnormal");
  399. V = Builder.CreateAnd(Eq, IsLessThanInf, "and");
  400. V = Builder.CreateAnd(V, IsNormal, "and");
  401. return RValue::get(Builder.CreateZExt(V, ConvertType(E->getType())));
  402. }
  403. case Builtin::BI__builtin_isfinite: {
  404. // isfinite(x) --> x == x && fabs(x) != infinity;
  405. Value *V = EmitScalarExpr(E->getArg(0));
  406. Value *Eq = Builder.CreateFCmpOEQ(V, V, "iseq");
  407. Value *Abs = EmitFAbs(*this, V, E->getArg(0)->getType());
  408. Value *IsNotInf =
  409. Builder.CreateFCmpUNE(Abs, ConstantFP::getInfinity(V->getType()),"isinf");
  410. V = Builder.CreateAnd(Eq, IsNotInf, "and");
  411. return RValue::get(Builder.CreateZExt(V, ConvertType(E->getType())));
  412. }
  413. case Builtin::BI__builtin_fpclassify: {
  414. Value *V = EmitScalarExpr(E->getArg(5));
  415. llvm::Type *Ty = ConvertType(E->getArg(5)->getType());
  416. // Create Result
  417. BasicBlock *Begin = Builder.GetInsertBlock();
  418. BasicBlock *End = createBasicBlock("fpclassify_end", this->CurFn);
  419. Builder.SetInsertPoint(End);
  420. PHINode *Result =
  421. Builder.CreatePHI(ConvertType(E->getArg(0)->getType()), 4,
  422. "fpclassify_result");
  423. // if (V==0) return FP_ZERO
  424. Builder.SetInsertPoint(Begin);
  425. Value *IsZero = Builder.CreateFCmpOEQ(V, Constant::getNullValue(Ty),
  426. "iszero");
  427. Value *ZeroLiteral = EmitScalarExpr(E->getArg(4));
  428. BasicBlock *NotZero = createBasicBlock("fpclassify_not_zero", this->CurFn);
  429. Builder.CreateCondBr(IsZero, End, NotZero);
  430. Result->addIncoming(ZeroLiteral, Begin);
  431. // if (V != V) return FP_NAN
  432. Builder.SetInsertPoint(NotZero);
  433. Value *IsNan = Builder.CreateFCmpUNO(V, V, "cmp");
  434. Value *NanLiteral = EmitScalarExpr(E->getArg(0));
  435. BasicBlock *NotNan = createBasicBlock("fpclassify_not_nan", this->CurFn);
  436. Builder.CreateCondBr(IsNan, End, NotNan);
  437. Result->addIncoming(NanLiteral, NotZero);
  438. // if (fabs(V) == infinity) return FP_INFINITY
  439. Builder.SetInsertPoint(NotNan);
  440. Value *VAbs = EmitFAbs(*this, V, E->getArg(5)->getType());
  441. Value *IsInf =
  442. Builder.CreateFCmpOEQ(VAbs, ConstantFP::getInfinity(V->getType()),
  443. "isinf");
  444. Value *InfLiteral = EmitScalarExpr(E->getArg(1));
  445. BasicBlock *NotInf = createBasicBlock("fpclassify_not_inf", this->CurFn);
  446. Builder.CreateCondBr(IsInf, End, NotInf);
  447. Result->addIncoming(InfLiteral, NotNan);
  448. // if (fabs(V) >= MIN_NORMAL) return FP_NORMAL else FP_SUBNORMAL
  449. Builder.SetInsertPoint(NotInf);
  450. APFloat Smallest = APFloat::getSmallestNormalized(
  451. getContext().getFloatTypeSemantics(E->getArg(5)->getType()));
  452. Value *IsNormal =
  453. Builder.CreateFCmpUGE(VAbs, ConstantFP::get(V->getContext(), Smallest),
  454. "isnormal");
  455. Value *NormalResult =
  456. Builder.CreateSelect(IsNormal, EmitScalarExpr(E->getArg(2)),
  457. EmitScalarExpr(E->getArg(3)));
  458. Builder.CreateBr(End);
  459. Result->addIncoming(NormalResult, NotInf);
  460. // return Result
  461. Builder.SetInsertPoint(End);
  462. return RValue::get(Result);
  463. }
  464. case Builtin::BIalloca:
  465. case Builtin::BI__builtin_alloca: {
  466. Value *Size = EmitScalarExpr(E->getArg(0));
  467. return RValue::get(Builder.CreateAlloca(Builder.getInt8Ty(), Size));
  468. }
  469. case Builtin::BIbzero:
  470. case Builtin::BI__builtin_bzero: {
  471. Value *Address = EmitScalarExpr(E->getArg(0));
  472. Value *SizeVal = EmitScalarExpr(E->getArg(1));
  473. Builder.CreateMemSet(Address, Builder.getInt8(0), SizeVal, 1, false);
  474. return RValue::get(Address);
  475. }
  476. case Builtin::BImemcpy:
  477. case Builtin::BI__builtin_memcpy: {
  478. Value *Address = EmitScalarExpr(E->getArg(0));
  479. Value *SrcAddr = EmitScalarExpr(E->getArg(1));
  480. Value *SizeVal = EmitScalarExpr(E->getArg(2));
  481. Builder.CreateMemCpy(Address, SrcAddr, SizeVal, 1, false);
  482. return RValue::get(Address);
  483. }
  484. case Builtin::BI__builtin___memcpy_chk: {
  485. // fold __builtin_memcpy_chk(x, y, cst1, cst2) to memset iff cst1<=cst2.
  486. llvm::APSInt Size, DstSize;
  487. if (!E->getArg(2)->EvaluateAsInt(Size, CGM.getContext()) ||
  488. !E->getArg(3)->EvaluateAsInt(DstSize, CGM.getContext()))
  489. break;
  490. if (Size.ugt(DstSize))
  491. break;
  492. Value *Dest = EmitScalarExpr(E->getArg(0));
  493. Value *Src = EmitScalarExpr(E->getArg(1));
  494. Value *SizeVal = llvm::ConstantInt::get(Builder.getContext(), Size);
  495. Builder.CreateMemCpy(Dest, Src, SizeVal, 1, false);
  496. return RValue::get(Dest);
  497. }
  498. case Builtin::BI__builtin_objc_memmove_collectable: {
  499. Value *Address = EmitScalarExpr(E->getArg(0));
  500. Value *SrcAddr = EmitScalarExpr(E->getArg(1));
  501. Value *SizeVal = EmitScalarExpr(E->getArg(2));
  502. CGM.getObjCRuntime().EmitGCMemmoveCollectable(*this,
  503. Address, SrcAddr, SizeVal);
  504. return RValue::get(Address);
  505. }
  506. case Builtin::BI__builtin___memmove_chk: {
  507. // fold __builtin_memmove_chk(x, y, cst1, cst2) to memset iff cst1<=cst2.
  508. llvm::APSInt Size, DstSize;
  509. if (!E->getArg(2)->EvaluateAsInt(Size, CGM.getContext()) ||
  510. !E->getArg(3)->EvaluateAsInt(DstSize, CGM.getContext()))
  511. break;
  512. if (Size.ugt(DstSize))
  513. break;
  514. Value *Dest = EmitScalarExpr(E->getArg(0));
  515. Value *Src = EmitScalarExpr(E->getArg(1));
  516. Value *SizeVal = llvm::ConstantInt::get(Builder.getContext(), Size);
  517. Builder.CreateMemMove(Dest, Src, SizeVal, 1, false);
  518. return RValue::get(Dest);
  519. }
  520. case Builtin::BImemmove:
  521. case Builtin::BI__builtin_memmove: {
  522. Value *Address = EmitScalarExpr(E->getArg(0));
  523. Value *SrcAddr = EmitScalarExpr(E->getArg(1));
  524. Value *SizeVal = EmitScalarExpr(E->getArg(2));
  525. Builder.CreateMemMove(Address, SrcAddr, SizeVal, 1, false);
  526. return RValue::get(Address);
  527. }
  528. case Builtin::BImemset:
  529. case Builtin::BI__builtin_memset: {
  530. Value *Address = EmitScalarExpr(E->getArg(0));
  531. Value *ByteVal = Builder.CreateTrunc(EmitScalarExpr(E->getArg(1)),
  532. Builder.getInt8Ty());
  533. Value *SizeVal = EmitScalarExpr(E->getArg(2));
  534. Builder.CreateMemSet(Address, ByteVal, SizeVal, 1, false);
  535. return RValue::get(Address);
  536. }
  537. case Builtin::BI__builtin___memset_chk: {
  538. // fold __builtin_memset_chk(x, y, cst1, cst2) to memset iff cst1<=cst2.
  539. llvm::APSInt Size, DstSize;
  540. if (!E->getArg(2)->EvaluateAsInt(Size, CGM.getContext()) ||
  541. !E->getArg(3)->EvaluateAsInt(DstSize, CGM.getContext()))
  542. break;
  543. if (Size.ugt(DstSize))
  544. break;
  545. Value *Address = EmitScalarExpr(E->getArg(0));
  546. Value *ByteVal = Builder.CreateTrunc(EmitScalarExpr(E->getArg(1)),
  547. Builder.getInt8Ty());
  548. Value *SizeVal = llvm::ConstantInt::get(Builder.getContext(), Size);
  549. Builder.CreateMemSet(Address, ByteVal, SizeVal, 1, false);
  550. return RValue::get(Address);
  551. }
  552. case Builtin::BI__builtin_dwarf_cfa: {
  553. // The offset in bytes from the first argument to the CFA.
  554. //
  555. // Why on earth is this in the frontend? Is there any reason at
  556. // all that the backend can't reasonably determine this while
  557. // lowering llvm.eh.dwarf.cfa()?
  558. //
  559. // TODO: If there's a satisfactory reason, add a target hook for
  560. // this instead of hard-coding 0, which is correct for most targets.
  561. int32_t Offset = 0;
  562. Value *F = CGM.getIntrinsic(Intrinsic::eh_dwarf_cfa);
  563. return RValue::get(Builder.CreateCall(F,
  564. llvm::ConstantInt::get(Int32Ty, Offset)));
  565. }
  566. case Builtin::BI__builtin_return_address: {
  567. Value *Depth = EmitScalarExpr(E->getArg(0));
  568. Depth = Builder.CreateIntCast(Depth, Int32Ty, false);
  569. Value *F = CGM.getIntrinsic(Intrinsic::returnaddress);
  570. return RValue::get(Builder.CreateCall(F, Depth));
  571. }
  572. case Builtin::BI__builtin_frame_address: {
  573. Value *Depth = EmitScalarExpr(E->getArg(0));
  574. Depth = Builder.CreateIntCast(Depth, Int32Ty, false);
  575. Value *F = CGM.getIntrinsic(Intrinsic::frameaddress);
  576. return RValue::get(Builder.CreateCall(F, Depth));
  577. }
  578. case Builtin::BI__builtin_extract_return_addr: {
  579. Value *Address = EmitScalarExpr(E->getArg(0));
  580. Value *Result = getTargetHooks().decodeReturnAddress(*this, Address);
  581. return RValue::get(Result);
  582. }
  583. case Builtin::BI__builtin_frob_return_addr: {
  584. Value *Address = EmitScalarExpr(E->getArg(0));
  585. Value *Result = getTargetHooks().encodeReturnAddress(*this, Address);
  586. return RValue::get(Result);
  587. }
  588. case Builtin::BI__builtin_dwarf_sp_column: {
  589. llvm::IntegerType *Ty
  590. = cast<llvm::IntegerType>(ConvertType(E->getType()));
  591. int Column = getTargetHooks().getDwarfEHStackPointer(CGM);
  592. if (Column == -1) {
  593. CGM.ErrorUnsupported(E, "__builtin_dwarf_sp_column");
  594. return RValue::get(llvm::UndefValue::get(Ty));
  595. }
  596. return RValue::get(llvm::ConstantInt::get(Ty, Column, true));
  597. }
  598. case Builtin::BI__builtin_init_dwarf_reg_size_table: {
  599. Value *Address = EmitScalarExpr(E->getArg(0));
  600. if (getTargetHooks().initDwarfEHRegSizeTable(*this, Address))
  601. CGM.ErrorUnsupported(E, "__builtin_init_dwarf_reg_size_table");
  602. return RValue::get(llvm::UndefValue::get(ConvertType(E->getType())));
  603. }
  604. case Builtin::BI__builtin_eh_return: {
  605. Value *Int = EmitScalarExpr(E->getArg(0));
  606. Value *Ptr = EmitScalarExpr(E->getArg(1));
  607. llvm::IntegerType *IntTy = cast<llvm::IntegerType>(Int->getType());
  608. assert((IntTy->getBitWidth() == 32 || IntTy->getBitWidth() == 64) &&
  609. "LLVM's __builtin_eh_return only supports 32- and 64-bit variants");
  610. Value *F = CGM.getIntrinsic(IntTy->getBitWidth() == 32
  611. ? Intrinsic::eh_return_i32
  612. : Intrinsic::eh_return_i64);
  613. Builder.CreateCall2(F, Int, Ptr);
  614. Builder.CreateUnreachable();
  615. // We do need to preserve an insertion point.
  616. EmitBlock(createBasicBlock("builtin_eh_return.cont"));
  617. return RValue::get(0);
  618. }
  619. case Builtin::BI__builtin_unwind_init: {
  620. Value *F = CGM.getIntrinsic(Intrinsic::eh_unwind_init);
  621. return RValue::get(Builder.CreateCall(F));
  622. }
  623. case Builtin::BI__builtin_extend_pointer: {
  624. // Extends a pointer to the size of an _Unwind_Word, which is
  625. // uint64_t on all platforms. Generally this gets poked into a
  626. // register and eventually used as an address, so if the
  627. // addressing registers are wider than pointers and the platform
  628. // doesn't implicitly ignore high-order bits when doing
  629. // addressing, we need to make sure we zext / sext based on
  630. // the platform's expectations.
  631. //
  632. // See: http://gcc.gnu.org/ml/gcc-bugs/2002-02/msg00237.html
  633. // Cast the pointer to intptr_t.
  634. Value *Ptr = EmitScalarExpr(E->getArg(0));
  635. Value *Result = Builder.CreatePtrToInt(Ptr, IntPtrTy, "extend.cast");
  636. // If that's 64 bits, we're done.
  637. if (IntPtrTy->getBitWidth() == 64)
  638. return RValue::get(Result);
  639. // Otherwise, ask the codegen data what to do.
  640. if (getTargetHooks().extendPointerWithSExt())
  641. return RValue::get(Builder.CreateSExt(Result, Int64Ty, "extend.sext"));
  642. else
  643. return RValue::get(Builder.CreateZExt(Result, Int64Ty, "extend.zext"));
  644. }
  645. case Builtin::BI__builtin_setjmp: {
  646. // Buffer is a void**.
  647. Value *Buf = EmitScalarExpr(E->getArg(0));
  648. // Store the frame pointer to the setjmp buffer.
  649. Value *FrameAddr =
  650. Builder.CreateCall(CGM.getIntrinsic(Intrinsic::frameaddress),
  651. ConstantInt::get(Int32Ty, 0));
  652. Builder.CreateStore(FrameAddr, Buf);
  653. // Store the stack pointer to the setjmp buffer.
  654. Value *StackAddr =
  655. Builder.CreateCall(CGM.getIntrinsic(Intrinsic::stacksave));
  656. Value *StackSaveSlot =
  657. Builder.CreateGEP(Buf, ConstantInt::get(Int32Ty, 2));
  658. Builder.CreateStore(StackAddr, StackSaveSlot);
  659. // Call LLVM's EH setjmp, which is lightweight.
  660. Value *F = CGM.getIntrinsic(Intrinsic::eh_sjlj_setjmp);
  661. Buf = Builder.CreateBitCast(Buf, Int8PtrTy);
  662. return RValue::get(Builder.CreateCall(F, Buf));
  663. }
  664. case Builtin::BI__builtin_longjmp: {
  665. Value *Buf = EmitScalarExpr(E->getArg(0));
  666. Buf = Builder.CreateBitCast(Buf, Int8PtrTy);
  667. // Call LLVM's EH longjmp, which is lightweight.
  668. Builder.CreateCall(CGM.getIntrinsic(Intrinsic::eh_sjlj_longjmp), Buf);
  669. // longjmp doesn't return; mark this as unreachable.
  670. Builder.CreateUnreachable();
  671. // We do need to preserve an insertion point.
  672. EmitBlock(createBasicBlock("longjmp.cont"));
  673. return RValue::get(0);
  674. }
  675. case Builtin::BI__sync_fetch_and_add:
  676. case Builtin::BI__sync_fetch_and_sub:
  677. case Builtin::BI__sync_fetch_and_or:
  678. case Builtin::BI__sync_fetch_and_and:
  679. case Builtin::BI__sync_fetch_and_xor:
  680. case Builtin::BI__sync_add_and_fetch:
  681. case Builtin::BI__sync_sub_and_fetch:
  682. case Builtin::BI__sync_and_and_fetch:
  683. case Builtin::BI__sync_or_and_fetch:
  684. case Builtin::BI__sync_xor_and_fetch:
  685. case Builtin::BI__sync_val_compare_and_swap:
  686. case Builtin::BI__sync_bool_compare_and_swap:
  687. case Builtin::BI__sync_lock_test_and_set:
  688. case Builtin::BI__sync_lock_release:
  689. case Builtin::BI__sync_swap:
  690. llvm_unreachable("Shouldn't make it through sema");
  691. case Builtin::BI__sync_fetch_and_add_1:
  692. case Builtin::BI__sync_fetch_and_add_2:
  693. case Builtin::BI__sync_fetch_and_add_4:
  694. case Builtin::BI__sync_fetch_and_add_8:
  695. case Builtin::BI__sync_fetch_and_add_16:
  696. return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Add, E);
  697. case Builtin::BI__sync_fetch_and_sub_1:
  698. case Builtin::BI__sync_fetch_and_sub_2:
  699. case Builtin::BI__sync_fetch_and_sub_4:
  700. case Builtin::BI__sync_fetch_and_sub_8:
  701. case Builtin::BI__sync_fetch_and_sub_16:
  702. return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Sub, E);
  703. case Builtin::BI__sync_fetch_and_or_1:
  704. case Builtin::BI__sync_fetch_and_or_2:
  705. case Builtin::BI__sync_fetch_and_or_4:
  706. case Builtin::BI__sync_fetch_and_or_8:
  707. case Builtin::BI__sync_fetch_and_or_16:
  708. return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Or, E);
  709. case Builtin::BI__sync_fetch_and_and_1:
  710. case Builtin::BI__sync_fetch_and_and_2:
  711. case Builtin::BI__sync_fetch_and_and_4:
  712. case Builtin::BI__sync_fetch_and_and_8:
  713. case Builtin::BI__sync_fetch_and_and_16:
  714. return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::And, E);
  715. case Builtin::BI__sync_fetch_and_xor_1:
  716. case Builtin::BI__sync_fetch_and_xor_2:
  717. case Builtin::BI__sync_fetch_and_xor_4:
  718. case Builtin::BI__sync_fetch_and_xor_8:
  719. case Builtin::BI__sync_fetch_and_xor_16:
  720. return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Xor, E);
  721. // Clang extensions: not overloaded yet.
  722. case Builtin::BI__sync_fetch_and_min:
  723. return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Min, E);
  724. case Builtin::BI__sync_fetch_and_max:
  725. return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Max, E);
  726. case Builtin::BI__sync_fetch_and_umin:
  727. return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::UMin, E);
  728. case Builtin::BI__sync_fetch_and_umax:
  729. return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::UMax, E);
  730. case Builtin::BI__sync_add_and_fetch_1:
  731. case Builtin::BI__sync_add_and_fetch_2:
  732. case Builtin::BI__sync_add_and_fetch_4:
  733. case Builtin::BI__sync_add_and_fetch_8:
  734. case Builtin::BI__sync_add_and_fetch_16:
  735. return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Add, E,
  736. llvm::Instruction::Add);
  737. case Builtin::BI__sync_sub_and_fetch_1:
  738. case Builtin::BI__sync_sub_and_fetch_2:
  739. case Builtin::BI__sync_sub_and_fetch_4:
  740. case Builtin::BI__sync_sub_and_fetch_8:
  741. case Builtin::BI__sync_sub_and_fetch_16:
  742. return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Sub, E,
  743. llvm::Instruction::Sub);
  744. case Builtin::BI__sync_and_and_fetch_1:
  745. case Builtin::BI__sync_and_and_fetch_2:
  746. case Builtin::BI__sync_and_and_fetch_4:
  747. case Builtin::BI__sync_and_and_fetch_8:
  748. case Builtin::BI__sync_and_and_fetch_16:
  749. return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::And, E,
  750. llvm::Instruction::And);
  751. case Builtin::BI__sync_or_and_fetch_1:
  752. case Builtin::BI__sync_or_and_fetch_2:
  753. case Builtin::BI__sync_or_and_fetch_4:
  754. case Builtin::BI__sync_or_and_fetch_8:
  755. case Builtin::BI__sync_or_and_fetch_16:
  756. return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Or, E,
  757. llvm::Instruction::Or);
  758. case Builtin::BI__sync_xor_and_fetch_1:
  759. case Builtin::BI__sync_xor_and_fetch_2:
  760. case Builtin::BI__sync_xor_and_fetch_4:
  761. case Builtin::BI__sync_xor_and_fetch_8:
  762. case Builtin::BI__sync_xor_and_fetch_16:
  763. return EmitBinaryAtomicPost(*this, llvm::AtomicRMWInst::Xor, E,
  764. llvm::Instruction::Xor);
  765. case Builtin::BI__sync_val_compare_and_swap_1:
  766. case Builtin::BI__sync_val_compare_and_swap_2:
  767. case Builtin::BI__sync_val_compare_and_swap_4:
  768. case Builtin::BI__sync_val_compare_and_swap_8:
  769. case Builtin::BI__sync_val_compare_and_swap_16: {
  770. QualType T = E->getType();
  771. llvm::Value *DestPtr = EmitScalarExpr(E->getArg(0));
  772. unsigned AddrSpace =
  773. cast<llvm::PointerType>(DestPtr->getType())->getAddressSpace();
  774. llvm::IntegerType *IntType =
  775. llvm::IntegerType::get(getLLVMContext(),
  776. getContext().getTypeSize(T));
  777. llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);
  778. Value *Args[3];
  779. Args[0] = Builder.CreateBitCast(DestPtr, IntPtrType);
  780. Args[1] = EmitScalarExpr(E->getArg(1));
  781. llvm::Type *ValueType = Args[1]->getType();
  782. Args[1] = EmitToInt(*this, Args[1], T, IntType);
  783. Args[2] = EmitToInt(*this, EmitScalarExpr(E->getArg(2)), T, IntType);
  784. Value *Result = Builder.CreateAtomicCmpXchg(Args[0], Args[1], Args[2],
  785. llvm::SequentiallyConsistent);
  786. Result = EmitFromInt(*this, Result, T, ValueType);
  787. return RValue::get(Result);
  788. }
  789. case Builtin::BI__sync_bool_compare_and_swap_1:
  790. case Builtin::BI__sync_bool_compare_and_swap_2:
  791. case Builtin::BI__sync_bool_compare_and_swap_4:
  792. case Builtin::BI__sync_bool_compare_and_swap_8:
  793. case Builtin::BI__sync_bool_compare_and_swap_16: {
  794. QualType T = E->getArg(1)->getType();
  795. llvm::Value *DestPtr = EmitScalarExpr(E->getArg(0));
  796. unsigned AddrSpace =
  797. cast<llvm::PointerType>(DestPtr->getType())->getAddressSpace();
  798. llvm::IntegerType *IntType =
  799. llvm::IntegerType::get(getLLVMContext(),
  800. getContext().getTypeSize(T));
  801. llvm::Type *IntPtrType = IntType->getPointerTo(AddrSpace);
  802. Value *Args[3];
  803. Args[0] = Builder.CreateBitCast(DestPtr, IntPtrType);
  804. Args[1] = EmitToInt(*this, EmitScalarExpr(E->getArg(1)), T, IntType);
  805. Args[2] = EmitToInt(*this, EmitScalarExpr(E->getArg(2)), T, IntType);
  806. Value *OldVal = Args[1];
  807. Value *PrevVal = Builder.CreateAtomicCmpXchg(Args[0], Args[1], Args[2],
  808. llvm::SequentiallyConsistent);
  809. Value *Result = Builder.CreateICmpEQ(PrevVal, OldVal);
  810. // zext bool to int.
  811. Result = Builder.CreateZExt(Result, ConvertType(E->getType()));
  812. return RValue::get(Result);
  813. }
  814. case Builtin::BI__sync_swap_1:
  815. case Builtin::BI__sync_swap_2:
  816. case Builtin::BI__sync_swap_4:
  817. case Builtin::BI__sync_swap_8:
  818. case Builtin::BI__sync_swap_16:
  819. return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Xchg, E);
  820. case Builtin::BI__sync_lock_test_and_set_1:
  821. case Builtin::BI__sync_lock_test_and_set_2:
  822. case Builtin::BI__sync_lock_test_and_set_4:
  823. case Builtin::BI__sync_lock_test_and_set_8:
  824. case Builtin::BI__sync_lock_test_and_set_16:
  825. return EmitBinaryAtomic(*this, llvm::AtomicRMWInst::Xchg, E);
  826. case Builtin::BI__sync_lock_release_1:
  827. case Builtin::BI__sync_lock_release_2:
  828. case Builtin::BI__sync_lock_release_4:
  829. case Builtin::BI__sync_lock_release_8:
  830. case Builtin::BI__sync_lock_release_16: {
  831. Value *Ptr = EmitScalarExpr(E->getArg(0));
  832. llvm::Type *ElLLVMTy =
  833. cast<llvm::PointerType>(Ptr->getType())->getElementType();
  834. llvm::StoreInst *Store =
  835. Builder.CreateStore(llvm::Constant::getNullValue(ElLLVMTy), Ptr);
  836. QualType ElTy = E->getArg(0)->getType()->getPointeeType();
  837. CharUnits StoreSize = getContext().getTypeSizeInChars(ElTy);
  838. Store->setAlignment(StoreSize.getQuantity());
  839. Store->setAtomic(llvm::Release);
  840. return RValue::get(0);
  841. }
  842. case Builtin::BI__sync_synchronize: {
  843. // We assume this is supposed to correspond to a C++0x-style
  844. // sequentially-consistent fence (i.e. this is only usable for
  845. // synchonization, not device I/O or anything like that). This intrinsic
  846. // is really badly designed in the sense that in theory, there isn't
  847. // any way to safely use it... but in practice, it mostly works
  848. // to use it with non-atomic loads and stores to get acquire/release
  849. // semantics.
  850. Builder.CreateFence(llvm::SequentiallyConsistent);
  851. return RValue::get(0);
  852. }
  853. case Builtin::BI__atomic_thread_fence:
  854. case Builtin::BI__atomic_signal_fence: {
  855. llvm::SynchronizationScope Scope;
  856. if (BuiltinID == Builtin::BI__atomic_signal_fence)
  857. Scope = llvm::SingleThread;
  858. else
  859. Scope = llvm::CrossThread;
  860. Value *Order = EmitScalarExpr(E->getArg(0));
  861. if (isa<llvm::ConstantInt>(Order)) {
  862. int ord = cast<llvm::ConstantInt>(Order)->getZExtValue();
  863. switch (ord) {
  864. case 0: // memory_order_relaxed
  865. default: // invalid order
  866. break;
  867. case 1: // memory_order_consume
  868. case 2: // memory_order_acquire
  869. Builder.CreateFence(llvm::Acquire, Scope);
  870. break;
  871. case 3: // memory_order_release
  872. Builder.CreateFence(llvm::Release, Scope);
  873. break;
  874. case 4: // memory_order_acq_rel
  875. Builder.CreateFence(llvm::AcquireRelease, Scope);
  876. break;
  877. case 5: // memory_order_seq_cst
  878. Builder.CreateFence(llvm::SequentiallyConsistent, Scope);
  879. break;
  880. }
  881. return RValue::get(0);
  882. }
  883. llvm::BasicBlock *AcquireBB, *ReleaseBB, *AcqRelBB, *SeqCstBB;
  884. AcquireBB = createBasicBlock("acquire", CurFn);
  885. ReleaseBB = createBasicBlock("release", CurFn);
  886. AcqRelBB = createBasicBlock("acqrel", CurFn);
  887. SeqCstBB = createBasicBlock("seqcst", CurFn);
  888. llvm::BasicBlock *ContBB = createBasicBlock("atomic.continue", CurFn);
  889. Order = Builder.CreateIntCast(Order, Builder.getInt32Ty(), false);
  890. llvm::SwitchInst *SI = Builder.CreateSwitch(Order, ContBB);
  891. Builder.SetInsertPoint(AcquireBB);
  892. Builder.CreateFence(llvm::Acquire, Scope);
  893. Builder.CreateBr(ContBB);
  894. SI->addCase(Builder.getInt32(1), AcquireBB);
  895. SI->addCase(Builder.getInt32(2), AcquireBB);
  896. Builder.SetInsertPoint(ReleaseBB);
  897. Builder.CreateFence(llvm::Release, Scope);
  898. Builder.CreateBr(ContBB);
  899. SI->addCase(Builder.getInt32(3), ReleaseBB);
  900. Builder.SetInsertPoint(AcqRelBB);
  901. Builder.CreateFence(llvm::AcquireRelease, Scope);
  902. Builder.CreateBr(ContBB);
  903. SI->addCase(Builder.getInt32(4), AcqRelBB);
  904. Builder.SetInsertPoint(SeqCstBB);
  905. Builder.CreateFence(llvm::SequentiallyConsistent, Scope);
  906. Builder.CreateBr(ContBB);
  907. SI->addCase(Builder.getInt32(5), SeqCstBB);
  908. Builder.SetInsertPoint(ContBB);
  909. return RValue::get(0);
  910. }
  911. // Library functions with special handling.
  912. case Builtin::BIsqrt:
  913. case Builtin::BIsqrtf:
  914. case Builtin::BIsqrtl: {
  915. // TODO: there is currently no set of optimizer flags
  916. // sufficient for us to rewrite sqrt to @llvm.sqrt.
  917. // -fmath-errno=0 is not good enough; we need finiteness.
  918. // We could probably precondition the call with an ult
  919. // against 0, but is that worth the complexity?
  920. break;
  921. }
  922. case Builtin::BIpow:
  923. case Builtin::BIpowf:
  924. case Builtin::BIpowl: {
  925. // Rewrite sqrt to intrinsic if allowed.
  926. if (!FD->hasAttr<ConstAttr>())
  927. break;
  928. Value *Base = EmitScalarExpr(E->getArg(0));
  929. Value *Exponent = EmitScalarExpr(E->getArg(1));
  930. llvm::Type *ArgType = Base->getType();
  931. Value *F = CGM.getIntrinsic(Intrinsic::pow, ArgType);
  932. return RValue::get(Builder.CreateCall2(F, Base, Exponent));
  933. }
  934. case Builtin::BIfma:
  935. case Builtin::BIfmaf:
  936. case Builtin::BIfmal:
  937. case Builtin::BI__builtin_fma:
  938. case Builtin::BI__builtin_fmaf:
  939. case Builtin::BI__builtin_fmal: {
  940. // Rewrite fma to intrinsic.
  941. Value *FirstArg = EmitScalarExpr(E->getArg(0));
  942. llvm::Type *ArgType = FirstArg->getType();
  943. Value *F = CGM.getIntrinsic(Intrinsic::fma, ArgType);
  944. return RValue::get(Builder.CreateCall3(F, FirstArg,
  945. EmitScalarExpr(E->getArg(1)),
  946. EmitScalarExpr(E->getArg(2))));
  947. }
  948. case Builtin::BI__builtin_signbit:
  949. case Builtin::BI__builtin_signbitf:
  950. case Builtin::BI__builtin_signbitl: {
  951. LLVMContext &C = CGM.getLLVMContext();
  952. Value *Arg = EmitScalarExpr(E->getArg(0));
  953. llvm::Type *ArgTy = Arg->getType();
  954. if (ArgTy->isPPC_FP128Ty())
  955. break; // FIXME: I'm not sure what the right implementation is here.
  956. int ArgWidth = ArgTy->getPrimitiveSizeInBits();
  957. llvm::Type *ArgIntTy = llvm::IntegerType::get(C, ArgWidth);
  958. Value *BCArg = Builder.CreateBitCast(Arg, ArgIntTy);
  959. Value *ZeroCmp = llvm::Constant::getNullValue(ArgIntTy);
  960. Value *Result = Builder.CreateICmpSLT(BCArg, ZeroCmp);
  961. return RValue::get(Builder.CreateZExt(Result, ConvertType(E->getType())));
  962. }
  963. case Builtin::BI__builtin_annotation: {
  964. llvm::Value *AnnVal = EmitScalarExpr(E->getArg(0));
  965. llvm::Value *F = CGM.getIntrinsic(llvm::Intrinsic::annotation,
  966. AnnVal->getType());
  967. // Get the annotation string, go through casts. Sema requires this to be a
  968. // non-wide string literal, potentially casted, so the cast<> is safe.
  969. const Expr *AnnotationStrExpr = E->getArg(1)->IgnoreParenCasts();
  970. llvm::StringRef Str = cast<StringLiteral>(AnnotationStrExpr)->getString();
  971. return RValue::get(EmitAnnotationCall(F, AnnVal, Str, E->getExprLoc()));
  972. }
  973. }
  974. // If this is an alias for a lib function (e.g. __builtin_sin), emit
  975. // the call using the normal call path, but using the unmangled
  976. // version of the function name.
  977. if (getContext().BuiltinInfo.isLibFunction(BuiltinID))
  978. return emitLibraryCall(*this, FD, E,
  979. CGM.getBuiltinLibFunction(FD, BuiltinID));
  980. // If this is a predefined lib function (e.g. malloc), emit the call
  981. // using exactly the normal call path.
  982. if (getContext().BuiltinInfo.isPredefinedLibFunction(BuiltinID))
  983. return emitLibraryCall(*this, FD, E, EmitScalarExpr(E->getCallee()));
  984. // See if we have a target specific intrinsic.
  985. const char *Name = getContext().BuiltinInfo.GetName(BuiltinID);
  986. Intrinsic::ID IntrinsicID = Intrinsic::not_intrinsic;
  987. if (const char *Prefix =
  988. llvm::Triple::getArchTypePrefix(Target.getTriple().getArch()))
  989. IntrinsicID = Intrinsic::getIntrinsicForGCCBuiltin(Prefix, Name);
  990. if (IntrinsicID != Intrinsic::not_intrinsic) {
  991. SmallVector<Value*, 16> Args;
  992. // Find out if any arguments are required to be integer constant
  993. // expressions.
  994. unsigned ICEArguments = 0;
  995. ASTContext::GetBuiltinTypeError Error;
  996. getContext().GetBuiltinType(BuiltinID, Error, &ICEArguments);
  997. assert(Error == ASTContext::GE_None && "Should not codegen an error");
  998. Function *F = CGM.getIntrinsic(IntrinsicID);
  999. llvm::FunctionType *FTy = F->getFunctionType();
  1000. for (unsigned i = 0, e = E->getNumArgs(); i != e; ++i) {
  1001. Value *ArgValue;
  1002. // If this is a normal argument, just emit it as a scalar.
  1003. if ((ICEArguments & (1 << i)) == 0) {
  1004. ArgValue = EmitScalarExpr(E->getArg(i));
  1005. } else {
  1006. // If this is required to be a constant, constant fold it so that we
  1007. // know that the generated intrinsic gets a ConstantInt.
  1008. llvm::APSInt Result;
  1009. bool IsConst = E->getArg(i)->isIntegerConstantExpr(Result,getContext());
  1010. assert(IsConst && "Constant arg isn't actually constant?");
  1011. (void)IsConst;
  1012. ArgValue = llvm::ConstantInt::get(getLLVMContext(), Result);
  1013. }
  1014. // If the intrinsic arg type is different from the builtin arg type
  1015. // we need to do a bit cast.
  1016. llvm::Type *PTy = FTy->getParamType(i);
  1017. if (PTy != ArgValue->getType()) {
  1018. assert(PTy->canLosslesslyBitCastTo(FTy->getParamType(i)) &&
  1019. "Must be able to losslessly bit cast to param");
  1020. ArgValue = Builder.CreateBitCast(ArgValue, PTy);
  1021. }
  1022. Args.push_back(ArgValue);
  1023. }
  1024. Value *V = Builder.CreateCall(F, Args);
  1025. QualType BuiltinRetType = E->getType();
  1026. llvm::Type *RetTy = VoidTy;
  1027. if (!BuiltinRetType->isVoidType())
  1028. RetTy = ConvertType(BuiltinRetType);
  1029. if (RetTy != V->getType()) {
  1030. assert(V->getType()->canLosslesslyBitCastTo(RetTy) &&
  1031. "Must be able to losslessly bit cast result type");
  1032. V = Builder.CreateBitCast(V, RetTy);
  1033. }
  1034. return RValue::get(V);
  1035. }
  1036. // See if we have a target specific builtin that needs to be lowered.
  1037. if (Value *V = EmitTargetBuiltinExpr(BuiltinID, E))
  1038. return RValue::get(V);
  1039. ErrorUnsupported(E, "builtin function");
  1040. // Unknown builtin, for now just dump it out and return undef.
  1041. if (hasAggregateLLVMType(E->getType()))
  1042. return RValue::getAggregate(CreateMemTemp(E->getType()));
  1043. return RValue::get(llvm::UndefValue::get(ConvertType(E->getType())));
  1044. }
  1045. Value *CodeGenFunction::EmitTargetBuiltinExpr(unsigned BuiltinID,
  1046. const CallExpr *E) {
  1047. switch (Target.getTriple().getArch()) {
  1048. case llvm::Triple::arm:
  1049. case llvm::Triple::thumb:
  1050. return EmitARMBuiltinExpr(BuiltinID, E);
  1051. case llvm::Triple::x86:
  1052. case llvm::Triple::x86_64:
  1053. return EmitX86BuiltinExpr(BuiltinID, E);
  1054. case llvm::Triple::ppc:
  1055. case llvm::Triple::ppc64:
  1056. return EmitPPCBuiltinExpr(BuiltinID, E);
  1057. case llvm::Triple::hexagon:
  1058. return EmitHexagonBuiltinExpr(BuiltinID, E);
  1059. default:
  1060. return 0;
  1061. }
  1062. }
  1063. static llvm::VectorType *GetNeonType(CodeGenFunction *CGF,
  1064. NeonTypeFlags TypeFlags) {
  1065. int IsQuad = TypeFlags.isQuad();
  1066. switch (TypeFlags.getEltType()) {
  1067. case NeonTypeFlags::Int8:
  1068. case NeonTypeFlags::Poly8:
  1069. return llvm::VectorType::get(CGF->Int8Ty, 8 << IsQuad);
  1070. case NeonTypeFlags::Int16:
  1071. case NeonTypeFlags::Poly16:
  1072. case NeonTypeFlags::Float16:
  1073. return llvm::VectorType::get(CGF->Int16Ty, 4 << IsQuad);
  1074. case NeonTypeFlags::Int32:
  1075. return llvm::VectorType::get(CGF->Int32Ty, 2 << IsQuad);
  1076. case NeonTypeFlags::Int64:
  1077. return llvm::VectorType::get(CGF->Int64Ty, 1 << IsQuad);
  1078. case NeonTypeFlags::Float32:
  1079. return llvm::VectorType::get(CGF->FloatTy, 2 << IsQuad);
  1080. }
  1081. llvm_unreachable("Invalid NeonTypeFlags element type!");
  1082. }
  1083. Value *CodeGenFunction::EmitNeonSplat(Value *V, Constant *C) {
  1084. unsigned nElts = cast<llvm::VectorType>(V->getType())->getNumElements();
  1085. Value* SV = llvm::ConstantVector::getSplat(nElts, C);
  1086. return Builder.CreateShuffleVector(V, V, SV, "lane");
  1087. }
  1088. Value *CodeGenFunction::EmitNeonCall(Function *F, SmallVectorImpl<Value*> &Ops,
  1089. const char *name,
  1090. unsigned shift, bool rightshift) {
  1091. unsigned j = 0;
  1092. for (Function::const_arg_iterator ai = F->arg_begin(), ae = F->arg_end();
  1093. ai != ae; ++ai, ++j)
  1094. if (shift > 0 && shift == j)
  1095. Ops[j] = EmitNeonShiftVector(Ops[j], ai->getType(), rightshift);
  1096. else
  1097. Ops[j] = Builder.CreateBitCast(Ops[j], ai->getType(), name);
  1098. return Builder.CreateCall(F, Ops, name);
  1099. }
  1100. Value *CodeGenFunction::EmitNeonShiftVector(Value *V, llvm::Type *Ty,
  1101. bool neg) {
  1102. int SV = cast<ConstantInt>(V)->getSExtValue();
  1103. llvm::VectorType *VTy = cast<llvm::VectorType>(Ty);
  1104. llvm::Constant *C = ConstantInt::get(VTy->getElementType(), neg ? -SV : SV);
  1105. return llvm::ConstantVector::getSplat(VTy->getNumElements(), C);
  1106. }
  1107. /// GetPointeeAlignment - Given an expression with a pointer type, find the
  1108. /// alignment of the type referenced by the pointer. Skip over implicit
  1109. /// casts.
  1110. static Value *GetPointeeAlignment(CodeGenFunction &CGF, const Expr *Addr) {
  1111. unsigned Align = 1;
  1112. // Check if the type is a pointer. The implicit cast operand might not be.
  1113. while (Addr->getType()->isPointerType()) {
  1114. QualType PtTy = Addr->getType()->getPointeeType();
  1115. unsigned NewA = CGF.getContext().getTypeAlignInChars(PtTy).getQuantity();
  1116. if (NewA > Align)
  1117. Align = NewA;
  1118. // If the address is an implicit cast, repeat with the cast operand.
  1119. if (const ImplicitCastExpr *CastAddr = dyn_cast<ImplicitCastExpr>(Addr)) {
  1120. Addr = CastAddr->getSubExpr();
  1121. continue;
  1122. }
  1123. break;
  1124. }
  1125. return llvm::ConstantInt::get(CGF.Int32Ty, Align);
  1126. }
  1127. Value *CodeGenFunction::EmitARMBuiltinExpr(unsigned BuiltinID,
  1128. const CallExpr *E) {
  1129. if (BuiltinID == ARM::BI__clear_cache) {
  1130. const FunctionDecl *FD = E->getDirectCallee();
  1131. // Oddly people write this call without args on occasion and gcc accepts
  1132. // it - it's also marked as varargs in the description file.
  1133. SmallVector<Value*, 2> Ops;
  1134. for (unsigned i = 0; i < E->getNumArgs(); i++)
  1135. Ops.push_back(EmitScalarExpr(E->getArg(i)));
  1136. llvm::Type *Ty = CGM.getTypes().ConvertType(FD->getType());
  1137. llvm::FunctionType *FTy = cast<llvm::FunctionType>(Ty);
  1138. StringRef Name = FD->getName();
  1139. return Builder.CreateCall(CGM.CreateRuntimeFunction(FTy, Name), Ops);
  1140. }
  1141. if (BuiltinID == ARM::BI__builtin_arm_ldrexd) {
  1142. Function *F = CGM.getIntrinsic(Intrinsic::arm_ldrexd);
  1143. Value *LdPtr = EmitScalarExpr(E->getArg(0));
  1144. Value *Val = Builder.CreateCall(F, LdPtr, "ldrexd");
  1145. Value *Val0 = Builder.CreateExtractValue(Val, 1);
  1146. Value *Val1 = Builder.CreateExtractValue(Val, 0);
  1147. Val0 = Builder.CreateZExt(Val0, Int64Ty);
  1148. Val1 = Builder.CreateZExt(Val1, Int64Ty);
  1149. Value *ShiftCst = llvm::ConstantInt::get(Int64Ty, 32);
  1150. Val = Builder.CreateShl(Val0, ShiftCst, "shl", true /* nuw */);
  1151. return Builder.CreateOr(Val, Val1);
  1152. }
  1153. if (BuiltinID == ARM::BI__builtin_arm_strexd) {
  1154. Function *F = CGM.getIntrinsic(Intrinsic::arm_strexd);
  1155. llvm::Type *STy = llvm::StructType::get(Int32Ty, Int32Ty, NULL);
  1156. Value *One = llvm::ConstantInt::get(Int32Ty, 1);
  1157. Value *Tmp = Builder.CreateAlloca(Int64Ty, One);
  1158. Value *Val = EmitScalarExpr(E->getArg(0));
  1159. Builder.CreateStore(Val, Tmp);
  1160. Value *LdPtr = Builder.CreateBitCast(Tmp,llvm::PointerType::getUnqual(STy));
  1161. Val = Builder.CreateLoad(LdPtr);
  1162. Value *Arg0 = Builder.CreateExtractValue(Val, 0);
  1163. Value *Arg1 = Builder.CreateExtractValue(Val, 1);
  1164. Value *StPtr = EmitScalarExpr(E->getArg(1));
  1165. return Builder.CreateCall3(F, Arg0, Arg1, StPtr, "strexd");
  1166. }
  1167. SmallVector<Value*, 4> Ops;
  1168. for (unsigned i = 0, e = E->getNumArgs() - 1; i != e; i++)
  1169. Ops.push_back(EmitScalarExpr(E->getArg(i)));
  1170. // vget_lane and vset_lane are not overloaded and do not have an extra
  1171. // argument that specifies the vector type.
  1172. switch (BuiltinID) {
  1173. default: break;
  1174. case ARM::BI__builtin_neon_vget_lane_i8:
  1175. case ARM::BI__builtin_neon_vget_lane_i16:
  1176. case ARM::BI__builtin_neon_vget_lane_i32:
  1177. case ARM::BI__builtin_neon_vget_lane_i64:
  1178. case ARM::BI__builtin_neon_vget_lane_f32:
  1179. case ARM::BI__builtin_neon_vgetq_lane_i8:
  1180. case ARM::BI__builtin_neon_vgetq_lane_i16:
  1181. case ARM::BI__builtin_neon_vgetq_lane_i32:
  1182. case ARM::BI__builtin_neon_vgetq_lane_i64:
  1183. case ARM::BI__builtin_neon_vgetq_lane_f32:
  1184. return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)),
  1185. "vget_lane");
  1186. case ARM::BI__builtin_neon_vset_lane_i8:
  1187. case ARM::BI__builtin_neon_vset_lane_i16:
  1188. case ARM::BI__builtin_neon_vset_lane_i32:
  1189. case ARM::BI__builtin_neon_vset_lane_i64:
  1190. case ARM::BI__builtin_neon_vset_lane_f32:
  1191. case ARM::BI__builtin_neon_vsetq_lane_i8:
  1192. case ARM::BI__builtin_neon_vsetq_lane_i16:
  1193. case ARM::BI__builtin_neon_vsetq_lane_i32:
  1194. case ARM::BI__builtin_neon_vsetq_lane_i64:
  1195. case ARM::BI__builtin_neon_vsetq_lane_f32:
  1196. Ops.push_back(EmitScalarExpr(E->getArg(2)));
  1197. return Builder.CreateInsertElement(Ops[1], Ops[0], Ops[2], "vset_lane");
  1198. }
  1199. // Get the last argument, which specifies the vector type.
  1200. llvm::APSInt Result;
  1201. const Expr *Arg = E->getArg(E->getNumArgs()-1);
  1202. if (!Arg->isIntegerConstantExpr(Result, getContext()))
  1203. return 0;
  1204. if (BuiltinID == ARM::BI__builtin_arm_vcvtr_f ||
  1205. BuiltinID == ARM::BI__builtin_arm_vcvtr_d) {
  1206. // Determine the overloaded type of this builtin.
  1207. llvm::Type *Ty;
  1208. if (BuiltinID == ARM::BI__builtin_arm_vcvtr_f)
  1209. Ty = FloatTy;
  1210. else
  1211. Ty = DoubleTy;
  1212. // Determine whether this is an unsigned conversion or not.
  1213. bool usgn = Result.getZExtValue() == 1;
  1214. unsigned Int = usgn ? Intrinsic::arm_vcvtru : Intrinsic::arm_vcvtr;
  1215. // Call the appropriate intrinsic.
  1216. Function *F = CGM.getIntrinsic(Int, Ty);
  1217. return Builder.CreateCall(F, Ops, "vcvtr");
  1218. }
  1219. // Determine the type of this overloaded NEON intrinsic.
  1220. NeonTypeFlags Type(Result.getZExtValue());
  1221. bool usgn = Type.isUnsigned();
  1222. bool quad = Type.isQuad();
  1223. bool rightShift = false;
  1224. llvm::VectorType *VTy = GetNeonType(this, Type);
  1225. llvm::Type *Ty = VTy;
  1226. if (!Ty)
  1227. return 0;
  1228. unsigned Int;
  1229. switch (BuiltinID) {
  1230. default: return 0;
  1231. case ARM::BI__builtin_neon_vabd_v:
  1232. case ARM::BI__builtin_neon_vabdq_v:
  1233. Int = usgn ? Intrinsic::arm_neon_vabdu : Intrinsic::arm_neon_vabds;
  1234. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vabd");
  1235. case ARM::BI__builtin_neon_vabs_v:
  1236. case ARM::BI__builtin_neon_vabsq_v:
  1237. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vabs, Ty),
  1238. Ops, "vabs");
  1239. case ARM::BI__builtin_neon_vaddhn_v:
  1240. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vaddhn, Ty),
  1241. Ops, "vaddhn");
  1242. case ARM::BI__builtin_neon_vcale_v:
  1243. std::swap(Ops[0], Ops[1]);
  1244. case ARM::BI__builtin_neon_vcage_v: {
  1245. Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vacged);
  1246. return EmitNeonCall(F, Ops, "vcage");
  1247. }
  1248. case ARM::BI__builtin_neon_vcaleq_v:
  1249. std::swap(Ops[0], Ops[1]);
  1250. case ARM::BI__builtin_neon_vcageq_v: {
  1251. Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vacgeq);
  1252. return EmitNeonCall(F, Ops, "vcage");
  1253. }
  1254. case ARM::BI__builtin_neon_vcalt_v:
  1255. std::swap(Ops[0], Ops[1]);
  1256. case ARM::BI__builtin_neon_vcagt_v: {
  1257. Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vacgtd);
  1258. return EmitNeonCall(F, Ops, "vcagt");
  1259. }
  1260. case ARM::BI__builtin_neon_vcaltq_v:
  1261. std::swap(Ops[0], Ops[1]);
  1262. case ARM::BI__builtin_neon_vcagtq_v: {
  1263. Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vacgtq);
  1264. return EmitNeonCall(F, Ops, "vcagt");
  1265. }
  1266. case ARM::BI__builtin_neon_vcls_v:
  1267. case ARM::BI__builtin_neon_vclsq_v: {
  1268. Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vcls, Ty);
  1269. return EmitNeonCall(F, Ops, "vcls");
  1270. }
  1271. case ARM::BI__builtin_neon_vclz_v:
  1272. case ARM::BI__builtin_neon_vclzq_v: {
  1273. Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vclz, Ty);
  1274. return EmitNeonCall(F, Ops, "vclz");
  1275. }
  1276. case ARM::BI__builtin_neon_vcnt_v:
  1277. case ARM::BI__builtin_neon_vcntq_v: {
  1278. Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vcnt, Ty);
  1279. return EmitNeonCall(F, Ops, "vcnt");
  1280. }
  1281. case ARM::BI__builtin_neon_vcvt_f16_v: {
  1282. assert(Type.getEltType() == NeonTypeFlags::Float16 && !quad &&
  1283. "unexpected vcvt_f16_v builtin");
  1284. Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vcvtfp2hf);
  1285. return EmitNeonCall(F, Ops, "vcvt");
  1286. }
  1287. case ARM::BI__builtin_neon_vcvt_f32_f16: {
  1288. assert(Type.getEltType() == NeonTypeFlags::Float16 && !quad &&
  1289. "unexpected vcvt_f32_f16 builtin");
  1290. Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vcvthf2fp);
  1291. return EmitNeonCall(F, Ops, "vcvt");
  1292. }
  1293. case ARM::BI__builtin_neon_vcvt_f32_v:
  1294. case ARM::BI__builtin_neon_vcvtq_f32_v:
  1295. Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
  1296. Ty = GetNeonType(this, NeonTypeFlags(NeonTypeFlags::Float32, false, quad));
  1297. return usgn ? Builder.CreateUIToFP(Ops[0], Ty, "vcvt")
  1298. : Builder.CreateSIToFP(Ops[0], Ty, "vcvt");
  1299. case ARM::BI__builtin_neon_vcvt_s32_v:
  1300. case ARM::BI__builtin_neon_vcvt_u32_v:
  1301. case ARM::BI__builtin_neon_vcvtq_s32_v:
  1302. case ARM::BI__builtin_neon_vcvtq_u32_v: {
  1303. llvm::Type *FloatTy =
  1304. GetNeonType(this, NeonTypeFlags(NeonTypeFlags::Float32, false, quad));
  1305. Ops[0] = Builder.CreateBitCast(Ops[0], FloatTy);
  1306. return usgn ? Builder.CreateFPToUI(Ops[0], Ty, "vcvt")
  1307. : Builder.CreateFPToSI(Ops[0], Ty, "vcvt");
  1308. }
  1309. case ARM::BI__builtin_neon_vcvt_n_f32_v:
  1310. case ARM::BI__builtin_neon_vcvtq_n_f32_v: {
  1311. llvm::Type *FloatTy =
  1312. GetNeonType(this, NeonTypeFlags(NeonTypeFlags::Float32, false, quad));
  1313. llvm::Type *Tys[2] = { FloatTy, Ty };
  1314. Int = usgn ? Intrinsic::arm_neon_vcvtfxu2fp
  1315. : Intrinsic::arm_neon_vcvtfxs2fp;
  1316. Function *F = CGM.getIntrinsic(Int, Tys);
  1317. return EmitNeonCall(F, Ops, "vcvt_n");
  1318. }
  1319. case ARM::BI__builtin_neon_vcvt_n_s32_v:
  1320. case ARM::BI__builtin_neon_vcvt_n_u32_v:
  1321. case ARM::BI__builtin_neon_vcvtq_n_s32_v:
  1322. case ARM::BI__builtin_neon_vcvtq_n_u32_v: {
  1323. llvm::Type *FloatTy =
  1324. GetNeonType(this, NeonTypeFlags(NeonTypeFlags::Float32, false, quad));
  1325. llvm::Type *Tys[2] = { Ty, FloatTy };
  1326. Int = usgn ? Intrinsic::arm_neon_vcvtfp2fxu
  1327. : Intrinsic::arm_neon_vcvtfp2fxs;
  1328. Function *F = CGM.getIntrinsic(Int, Tys);
  1329. return EmitNeonCall(F, Ops, "vcvt_n");
  1330. }
  1331. case ARM::BI__builtin_neon_vext_v:
  1332. case ARM::BI__builtin_neon_vextq_v: {
  1333. int CV = cast<ConstantInt>(Ops[2])->getSExtValue();
  1334. SmallVector<Constant*, 16> Indices;
  1335. for (unsigned i = 0, e = VTy->getNumElements(); i != e; ++i)
  1336. Indices.push_back(ConstantInt::get(Int32Ty, i+CV));
  1337. Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
  1338. Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
  1339. Value *SV = llvm::ConstantVector::get(Indices);
  1340. return Builder.CreateShuffleVector(Ops[0], Ops[1], SV, "vext");
  1341. }
  1342. case ARM::BI__builtin_neon_vhadd_v:
  1343. case ARM::BI__builtin_neon_vhaddq_v:
  1344. Int = usgn ? Intrinsic::arm_neon_vhaddu : Intrinsic::arm_neon_vhadds;
  1345. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vhadd");
  1346. case ARM::BI__builtin_neon_vhsub_v:
  1347. case ARM::BI__builtin_neon_vhsubq_v:
  1348. Int = usgn ? Intrinsic::arm_neon_vhsubu : Intrinsic::arm_neon_vhsubs;
  1349. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vhsub");
  1350. case ARM::BI__builtin_neon_vld1_v:
  1351. case ARM::BI__builtin_neon_vld1q_v:
  1352. Ops.push_back(GetPointeeAlignment(*this, E->getArg(0)));
  1353. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vld1, Ty),
  1354. Ops, "vld1");
  1355. case ARM::BI__builtin_neon_vld1_lane_v:
  1356. case ARM::BI__builtin_neon_vld1q_lane_v: {
  1357. Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
  1358. Ty = llvm::PointerType::getUnqual(VTy->getElementType());
  1359. Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
  1360. LoadInst *Ld = Builder.CreateLoad(Ops[0]);
  1361. Value *Align = GetPointeeAlignment(*this, E->getArg(0));
  1362. Ld->setAlignment(cast<ConstantInt>(Align)->getZExtValue());
  1363. return Builder.CreateInsertElement(Ops[1], Ld, Ops[2], "vld1_lane");
  1364. }
  1365. case ARM::BI__builtin_neon_vld1_dup_v:
  1366. case ARM::BI__builtin_neon_vld1q_dup_v: {
  1367. Value *V = UndefValue::get(Ty);
  1368. Ty = llvm::PointerType::getUnqual(VTy->getElementType());
  1369. Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
  1370. LoadInst *Ld = Builder.CreateLoad(Ops[0]);
  1371. Value *Align = GetPointeeAlignment(*this, E->getArg(0));
  1372. Ld->setAlignment(cast<ConstantInt>(Align)->getZExtValue());
  1373. llvm::Constant *CI = ConstantInt::get(Int32Ty, 0);
  1374. Ops[0] = Builder.CreateInsertElement(V, Ld, CI);
  1375. return EmitNeonSplat(Ops[0], CI);
  1376. }
  1377. case ARM::BI__builtin_neon_vld2_v:
  1378. case ARM::BI__builtin_neon_vld2q_v: {
  1379. Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld2, Ty);
  1380. Value *Align = GetPointeeAlignment(*this, E->getArg(1));
  1381. Ops[1] = Builder.CreateCall2(F, Ops[1], Align, "vld2");
  1382. Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
  1383. Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
  1384. return Builder.CreateStore(Ops[1], Ops[0]);
  1385. }
  1386. case ARM::BI__builtin_neon_vld3_v:
  1387. case ARM::BI__builtin_neon_vld3q_v: {
  1388. Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld3, Ty);
  1389. Value *Align = GetPointeeAlignment(*this, E->getArg(1));
  1390. Ops[1] = Builder.CreateCall2(F, Ops[1], Align, "vld3");
  1391. Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
  1392. Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
  1393. return Builder.CreateStore(Ops[1], Ops[0]);
  1394. }
  1395. case ARM::BI__builtin_neon_vld4_v:
  1396. case ARM::BI__builtin_neon_vld4q_v: {
  1397. Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld4, Ty);
  1398. Value *Align = GetPointeeAlignment(*this, E->getArg(1));
  1399. Ops[1] = Builder.CreateCall2(F, Ops[1], Align, "vld4");
  1400. Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
  1401. Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
  1402. return Builder.CreateStore(Ops[1], Ops[0]);
  1403. }
  1404. case ARM::BI__builtin_neon_vld2_lane_v:
  1405. case ARM::BI__builtin_neon_vld2q_lane_v: {
  1406. Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld2lane, Ty);
  1407. Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
  1408. Ops[3] = Builder.CreateBitCast(Ops[3], Ty);
  1409. Ops.push_back(GetPointeeAlignment(*this, E->getArg(1)));
  1410. Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), "vld2_lane");
  1411. Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
  1412. Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
  1413. return Builder.CreateStore(Ops[1], Ops[0]);
  1414. }
  1415. case ARM::BI__builtin_neon_vld3_lane_v:
  1416. case ARM::BI__builtin_neon_vld3q_lane_v: {
  1417. Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld3lane, Ty);
  1418. Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
  1419. Ops[3] = Builder.CreateBitCast(Ops[3], Ty);
  1420. Ops[4] = Builder.CreateBitCast(Ops[4], Ty);
  1421. Ops.push_back(GetPointeeAlignment(*this, E->getArg(1)));
  1422. Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), "vld3_lane");
  1423. Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
  1424. Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
  1425. return Builder.CreateStore(Ops[1], Ops[0]);
  1426. }
  1427. case ARM::BI__builtin_neon_vld4_lane_v:
  1428. case ARM::BI__builtin_neon_vld4q_lane_v: {
  1429. Function *F = CGM.getIntrinsic(Intrinsic::arm_neon_vld4lane, Ty);
  1430. Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
  1431. Ops[3] = Builder.CreateBitCast(Ops[3], Ty);
  1432. Ops[4] = Builder.CreateBitCast(Ops[4], Ty);
  1433. Ops[5] = Builder.CreateBitCast(Ops[5], Ty);
  1434. Ops.push_back(GetPointeeAlignment(*this, E->getArg(1)));
  1435. Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), "vld3_lane");
  1436. Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
  1437. Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
  1438. return Builder.CreateStore(Ops[1], Ops[0]);
  1439. }
  1440. case ARM::BI__builtin_neon_vld2_dup_v:
  1441. case ARM::BI__builtin_neon_vld3_dup_v:
  1442. case ARM::BI__builtin_neon_vld4_dup_v: {
  1443. // Handle 64-bit elements as a special-case. There is no "dup" needed.
  1444. if (VTy->getElementType()->getPrimitiveSizeInBits() == 64) {
  1445. switch (BuiltinID) {
  1446. case ARM::BI__builtin_neon_vld2_dup_v:
  1447. Int = Intrinsic::arm_neon_vld2;
  1448. break;
  1449. case ARM::BI__builtin_neon_vld3_dup_v:
  1450. Int = Intrinsic::arm_neon_vld2;
  1451. break;
  1452. case ARM::BI__builtin_neon_vld4_dup_v:
  1453. Int = Intrinsic::arm_neon_vld2;
  1454. break;
  1455. default: llvm_unreachable("unknown vld_dup intrinsic?");
  1456. }
  1457. Function *F = CGM.getIntrinsic(Int, Ty);
  1458. Value *Align = GetPointeeAlignment(*this, E->getArg(1));
  1459. Ops[1] = Builder.CreateCall2(F, Ops[1], Align, "vld_dup");
  1460. Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
  1461. Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
  1462. return Builder.CreateStore(Ops[1], Ops[0]);
  1463. }
  1464. switch (BuiltinID) {
  1465. case ARM::BI__builtin_neon_vld2_dup_v:
  1466. Int = Intrinsic::arm_neon_vld2lane;
  1467. break;
  1468. case ARM::BI__builtin_neon_vld3_dup_v:
  1469. Int = Intrinsic::arm_neon_vld2lane;
  1470. break;
  1471. case ARM::BI__builtin_neon_vld4_dup_v:
  1472. Int = Intrinsic::arm_neon_vld2lane;
  1473. break;
  1474. default: llvm_unreachable("unknown vld_dup intrinsic?");
  1475. }
  1476. Function *F = CGM.getIntrinsic(Int, Ty);
  1477. llvm::StructType *STy = cast<llvm::StructType>(F->getReturnType());
  1478. SmallVector<Value*, 6> Args;
  1479. Args.push_back(Ops[1]);
  1480. Args.append(STy->getNumElements(), UndefValue::get(Ty));
  1481. llvm::Constant *CI = ConstantInt::get(Int32Ty, 0);
  1482. Args.push_back(CI);
  1483. Args.push_back(GetPointeeAlignment(*this, E->getArg(1)));
  1484. Ops[1] = Builder.CreateCall(F, Args, "vld_dup");
  1485. // splat lane 0 to all elts in each vector of the result.
  1486. for (unsigned i = 0, e = STy->getNumElements(); i != e; ++i) {
  1487. Value *Val = Builder.CreateExtractValue(Ops[1], i);
  1488. Value *Elt = Builder.CreateBitCast(Val, Ty);
  1489. Elt = EmitNeonSplat(Elt, CI);
  1490. Elt = Builder.CreateBitCast(Elt, Val->getType());
  1491. Ops[1] = Builder.CreateInsertValue(Ops[1], Elt, i);
  1492. }
  1493. Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
  1494. Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
  1495. return Builder.CreateStore(Ops[1], Ops[0]);
  1496. }
  1497. case ARM::BI__builtin_neon_vmax_v:
  1498. case ARM::BI__builtin_neon_vmaxq_v:
  1499. Int = usgn ? Intrinsic::arm_neon_vmaxu : Intrinsic::arm_neon_vmaxs;
  1500. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmax");
  1501. case ARM::BI__builtin_neon_vmin_v:
  1502. case ARM::BI__builtin_neon_vminq_v:
  1503. Int = usgn ? Intrinsic::arm_neon_vminu : Intrinsic::arm_neon_vmins;
  1504. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmin");
  1505. case ARM::BI__builtin_neon_vmovl_v: {
  1506. llvm::Type *DTy =llvm::VectorType::getTruncatedElementVectorType(VTy);
  1507. Ops[0] = Builder.CreateBitCast(Ops[0], DTy);
  1508. if (usgn)
  1509. return Builder.CreateZExt(Ops[0], Ty, "vmovl");
  1510. return Builder.CreateSExt(Ops[0], Ty, "vmovl");
  1511. }
  1512. case ARM::BI__builtin_neon_vmovn_v: {
  1513. llvm::Type *QTy = llvm::VectorType::getExtendedElementVectorType(VTy);
  1514. Ops[0] = Builder.CreateBitCast(Ops[0], QTy);
  1515. return Builder.CreateTrunc(Ops[0], Ty, "vmovn");
  1516. }
  1517. case ARM::BI__builtin_neon_vmul_v:
  1518. case ARM::BI__builtin_neon_vmulq_v:
  1519. assert(Type.isPoly() && "vmul builtin only supported for polynomial types");
  1520. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vmulp, Ty),
  1521. Ops, "vmul");
  1522. case ARM::BI__builtin_neon_vmull_v:
  1523. Int = usgn ? Intrinsic::arm_neon_vmullu : Intrinsic::arm_neon_vmulls;
  1524. Int = Type.isPoly() ? (unsigned)Intrinsic::arm_neon_vmullp : Int;
  1525. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmull");
  1526. case ARM::BI__builtin_neon_vpadal_v:
  1527. case ARM::BI__builtin_neon_vpadalq_v: {
  1528. Int = usgn ? Intrinsic::arm_neon_vpadalu : Intrinsic::arm_neon_vpadals;
  1529. // The source operand type has twice as many elements of half the size.
  1530. unsigned EltBits = VTy->getElementType()->getPrimitiveSizeInBits();
  1531. llvm::Type *EltTy =
  1532. llvm::IntegerType::get(getLLVMContext(), EltBits / 2);
  1533. llvm::Type *NarrowTy =
  1534. llvm::VectorType::get(EltTy, VTy->getNumElements() * 2);
  1535. llvm::Type *Tys[2] = { Ty, NarrowTy };
  1536. return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vpadal");
  1537. }
  1538. case ARM::BI__builtin_neon_vpadd_v:
  1539. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vpadd, Ty),
  1540. Ops, "vpadd");
  1541. case ARM::BI__builtin_neon_vpaddl_v:
  1542. case ARM::BI__builtin_neon_vpaddlq_v: {
  1543. Int = usgn ? Intrinsic::arm_neon_vpaddlu : Intrinsic::arm_neon_vpaddls;
  1544. // The source operand type has twice as many elements of half the size.
  1545. unsigned EltBits = VTy->getElementType()->getPrimitiveSizeInBits();
  1546. llvm::Type *EltTy = llvm::IntegerType::get(getLLVMContext(), EltBits / 2);
  1547. llvm::Type *NarrowTy =
  1548. llvm::VectorType::get(EltTy, VTy->getNumElements() * 2);
  1549. llvm::Type *Tys[2] = { Ty, NarrowTy };
  1550. return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vpaddl");
  1551. }
  1552. case ARM::BI__builtin_neon_vpmax_v:
  1553. Int = usgn ? Intrinsic::arm_neon_vpmaxu : Intrinsic::arm_neon_vpmaxs;
  1554. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vpmax");
  1555. case ARM::BI__builtin_neon_vpmin_v:
  1556. Int = usgn ? Intrinsic::arm_neon_vpminu : Intrinsic::arm_neon_vpmins;
  1557. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vpmin");
  1558. case ARM::BI__builtin_neon_vqabs_v:
  1559. case ARM::BI__builtin_neon_vqabsq_v:
  1560. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqabs, Ty),
  1561. Ops, "vqabs");
  1562. case ARM::BI__builtin_neon_vqadd_v:
  1563. case ARM::BI__builtin_neon_vqaddq_v:
  1564. Int = usgn ? Intrinsic::arm_neon_vqaddu : Intrinsic::arm_neon_vqadds;
  1565. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqadd");
  1566. case ARM::BI__builtin_neon_vqdmlal_v:
  1567. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqdmlal, Ty),
  1568. Ops, "vqdmlal");
  1569. case ARM::BI__builtin_neon_vqdmlsl_v:
  1570. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqdmlsl, Ty),
  1571. Ops, "vqdmlsl");
  1572. case ARM::BI__builtin_neon_vqdmulh_v:
  1573. case ARM::BI__builtin_neon_vqdmulhq_v:
  1574. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqdmulh, Ty),
  1575. Ops, "vqdmulh");
  1576. case ARM::BI__builtin_neon_vqdmull_v:
  1577. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqdmull, Ty),
  1578. Ops, "vqdmull");
  1579. case ARM::BI__builtin_neon_vqmovn_v:
  1580. Int = usgn ? Intrinsic::arm_neon_vqmovnu : Intrinsic::arm_neon_vqmovns;
  1581. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqmovn");
  1582. case ARM::BI__builtin_neon_vqmovun_v:
  1583. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqmovnsu, Ty),
  1584. Ops, "vqdmull");
  1585. case ARM::BI__builtin_neon_vqneg_v:
  1586. case ARM::BI__builtin_neon_vqnegq_v:
  1587. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqneg, Ty),
  1588. Ops, "vqneg");
  1589. case ARM::BI__builtin_neon_vqrdmulh_v:
  1590. case ARM::BI__builtin_neon_vqrdmulhq_v:
  1591. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqrdmulh, Ty),
  1592. Ops, "vqrdmulh");
  1593. case ARM::BI__builtin_neon_vqrshl_v:
  1594. case ARM::BI__builtin_neon_vqrshlq_v:
  1595. Int = usgn ? Intrinsic::arm_neon_vqrshiftu : Intrinsic::arm_neon_vqrshifts;
  1596. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqrshl");
  1597. case ARM::BI__builtin_neon_vqrshrn_n_v:
  1598. Int = usgn ? Intrinsic::arm_neon_vqrshiftnu : Intrinsic::arm_neon_vqrshiftns;
  1599. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqrshrn_n",
  1600. 1, true);
  1601. case ARM::BI__builtin_neon_vqrshrun_n_v:
  1602. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqrshiftnsu, Ty),
  1603. Ops, "vqrshrun_n", 1, true);
  1604. case ARM::BI__builtin_neon_vqshl_v:
  1605. case ARM::BI__builtin_neon_vqshlq_v:
  1606. Int = usgn ? Intrinsic::arm_neon_vqshiftu : Intrinsic::arm_neon_vqshifts;
  1607. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshl");
  1608. case ARM::BI__builtin_neon_vqshl_n_v:
  1609. case ARM::BI__builtin_neon_vqshlq_n_v:
  1610. Int = usgn ? Intrinsic::arm_neon_vqshiftu : Intrinsic::arm_neon_vqshifts;
  1611. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshl_n",
  1612. 1, false);
  1613. case ARM::BI__builtin_neon_vqshlu_n_v:
  1614. case ARM::BI__builtin_neon_vqshluq_n_v:
  1615. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqshiftsu, Ty),
  1616. Ops, "vqshlu", 1, false);
  1617. case ARM::BI__builtin_neon_vqshrn_n_v:
  1618. Int = usgn ? Intrinsic::arm_neon_vqshiftnu : Intrinsic::arm_neon_vqshiftns;
  1619. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshrn_n",
  1620. 1, true);
  1621. case ARM::BI__builtin_neon_vqshrun_n_v:
  1622. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vqshiftnsu, Ty),
  1623. Ops, "vqshrun_n", 1, true);
  1624. case ARM::BI__builtin_neon_vqsub_v:
  1625. case ARM::BI__builtin_neon_vqsubq_v:
  1626. Int = usgn ? Intrinsic::arm_neon_vqsubu : Intrinsic::arm_neon_vqsubs;
  1627. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqsub");
  1628. case ARM::BI__builtin_neon_vraddhn_v:
  1629. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vraddhn, Ty),
  1630. Ops, "vraddhn");
  1631. case ARM::BI__builtin_neon_vrecpe_v:
  1632. case ARM::BI__builtin_neon_vrecpeq_v:
  1633. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrecpe, Ty),
  1634. Ops, "vrecpe");
  1635. case ARM::BI__builtin_neon_vrecps_v:
  1636. case ARM::BI__builtin_neon_vrecpsq_v:
  1637. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrecps, Ty),
  1638. Ops, "vrecps");
  1639. case ARM::BI__builtin_neon_vrhadd_v:
  1640. case ARM::BI__builtin_neon_vrhaddq_v:
  1641. Int = usgn ? Intrinsic::arm_neon_vrhaddu : Intrinsic::arm_neon_vrhadds;
  1642. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrhadd");
  1643. case ARM::BI__builtin_neon_vrshl_v:
  1644. case ARM::BI__builtin_neon_vrshlq_v:
  1645. Int = usgn ? Intrinsic::arm_neon_vrshiftu : Intrinsic::arm_neon_vrshifts;
  1646. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrshl");
  1647. case ARM::BI__builtin_neon_vrshrn_n_v:
  1648. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrshiftn, Ty),
  1649. Ops, "vrshrn_n", 1, true);
  1650. case ARM::BI__builtin_neon_vrshr_n_v:
  1651. case ARM::BI__builtin_neon_vrshrq_n_v:
  1652. Int = usgn ? Intrinsic::arm_neon_vrshiftu : Intrinsic::arm_neon_vrshifts;
  1653. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrshr_n", 1, true);
  1654. case ARM::BI__builtin_neon_vrsqrte_v:
  1655. case ARM::BI__builtin_neon_vrsqrteq_v:
  1656. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrsqrte, Ty),
  1657. Ops, "vrsqrte");
  1658. case ARM::BI__builtin_neon_vrsqrts_v:
  1659. case ARM::BI__builtin_neon_vrsqrtsq_v:
  1660. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrsqrts, Ty),
  1661. Ops, "vrsqrts");
  1662. case ARM::BI__builtin_neon_vrsra_n_v:
  1663. case ARM::BI__builtin_neon_vrsraq_n_v:
  1664. Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
  1665. Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
  1666. Ops[2] = EmitNeonShiftVector(Ops[2], Ty, true);
  1667. Int = usgn ? Intrinsic::arm_neon_vrshiftu : Intrinsic::arm_neon_vrshifts;
  1668. Ops[1] = Builder.CreateCall2(CGM.getIntrinsic(Int, Ty), Ops[1], Ops[2]);
  1669. return Builder.CreateAdd(Ops[0], Ops[1], "vrsra_n");
  1670. case ARM::BI__builtin_neon_vrsubhn_v:
  1671. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vrsubhn, Ty),
  1672. Ops, "vrsubhn");
  1673. case ARM::BI__builtin_neon_vshl_v:
  1674. case ARM::BI__builtin_neon_vshlq_v:
  1675. Int = usgn ? Intrinsic::arm_neon_vshiftu : Intrinsic::arm_neon_vshifts;
  1676. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vshl");
  1677. case ARM::BI__builtin_neon_vshll_n_v:
  1678. Int = usgn ? Intrinsic::arm_neon_vshiftlu : Intrinsic::arm_neon_vshiftls;
  1679. return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vshll", 1);
  1680. case ARM::BI__builtin_neon_vshl_n_v:
  1681. case ARM::BI__builtin_neon_vshlq_n_v:
  1682. Ops[1] = EmitNeonShiftVector(Ops[1], Ty, false);
  1683. return Builder.CreateShl(Builder.CreateBitCast(Ops[0],Ty), Ops[1], "vshl_n");
  1684. case ARM::BI__builtin_neon_vshrn_n_v:
  1685. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vshiftn, Ty),
  1686. Ops, "vshrn_n", 1, true);
  1687. case ARM::BI__builtin_neon_vshr_n_v:
  1688. case ARM::BI__builtin_neon_vshrq_n_v:
  1689. Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
  1690. Ops[1] = EmitNeonShiftVector(Ops[1], Ty, false);
  1691. if (usgn)
  1692. return Builder.CreateLShr(Ops[0], Ops[1], "vshr_n");
  1693. else
  1694. return Builder.CreateAShr(Ops[0], Ops[1], "vshr_n");
  1695. case ARM::BI__builtin_neon_vsri_n_v:
  1696. case ARM::BI__builtin_neon_vsriq_n_v:
  1697. rightShift = true;
  1698. case ARM::BI__builtin_neon_vsli_n_v:
  1699. case ARM::BI__builtin_neon_vsliq_n_v:
  1700. Ops[2] = EmitNeonShiftVector(Ops[2], Ty, rightShift);
  1701. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vshiftins, Ty),
  1702. Ops, "vsli_n");
  1703. case ARM::BI__builtin_neon_vsra_n_v:
  1704. case ARM::BI__builtin_neon_vsraq_n_v:
  1705. Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
  1706. Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
  1707. Ops[2] = EmitNeonShiftVector(Ops[2], Ty, false);
  1708. if (usgn)
  1709. Ops[1] = Builder.CreateLShr(Ops[1], Ops[2], "vsra_n");
  1710. else
  1711. Ops[1] = Builder.CreateAShr(Ops[1], Ops[2], "vsra_n");
  1712. return Builder.CreateAdd(Ops[0], Ops[1]);
  1713. case ARM::BI__builtin_neon_vst1_v:
  1714. case ARM::BI__builtin_neon_vst1q_v:
  1715. Ops.push_back(GetPointeeAlignment(*this, E->getArg(0)));
  1716. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst1, Ty),
  1717. Ops, "");
  1718. case ARM::BI__builtin_neon_vst1_lane_v:
  1719. case ARM::BI__builtin_neon_vst1q_lane_v: {
  1720. Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
  1721. Ops[1] = Builder.CreateExtractElement(Ops[1], Ops[2]);
  1722. Ty = llvm::PointerType::getUnqual(Ops[1]->getType());
  1723. StoreInst *St = Builder.CreateStore(Ops[1],
  1724. Builder.CreateBitCast(Ops[0], Ty));
  1725. Value *Align = GetPointeeAlignment(*this, E->getArg(0));
  1726. St->setAlignment(cast<ConstantInt>(Align)->getZExtValue());
  1727. return St;
  1728. }
  1729. case ARM::BI__builtin_neon_vst2_v:
  1730. case ARM::BI__builtin_neon_vst2q_v:
  1731. Ops.push_back(GetPointeeAlignment(*this, E->getArg(0)));
  1732. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst2, Ty),
  1733. Ops, "");
  1734. case ARM::BI__builtin_neon_vst2_lane_v:
  1735. case ARM::BI__builtin_neon_vst2q_lane_v:
  1736. Ops.push_back(GetPointeeAlignment(*this, E->getArg(0)));
  1737. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst2lane, Ty),
  1738. Ops, "");
  1739. case ARM::BI__builtin_neon_vst3_v:
  1740. case ARM::BI__builtin_neon_vst3q_v:
  1741. Ops.push_back(GetPointeeAlignment(*this, E->getArg(0)));
  1742. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst3, Ty),
  1743. Ops, "");
  1744. case ARM::BI__builtin_neon_vst3_lane_v:
  1745. case ARM::BI__builtin_neon_vst3q_lane_v:
  1746. Ops.push_back(GetPointeeAlignment(*this, E->getArg(0)));
  1747. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst3lane, Ty),
  1748. Ops, "");
  1749. case ARM::BI__builtin_neon_vst4_v:
  1750. case ARM::BI__builtin_neon_vst4q_v:
  1751. Ops.push_back(GetPointeeAlignment(*this, E->getArg(0)));
  1752. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst4, Ty),
  1753. Ops, "");
  1754. case ARM::BI__builtin_neon_vst4_lane_v:
  1755. case ARM::BI__builtin_neon_vst4q_lane_v:
  1756. Ops.push_back(GetPointeeAlignment(*this, E->getArg(0)));
  1757. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vst4lane, Ty),
  1758. Ops, "");
  1759. case ARM::BI__builtin_neon_vsubhn_v:
  1760. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vsubhn, Ty),
  1761. Ops, "vsubhn");
  1762. case ARM::BI__builtin_neon_vtbl1_v:
  1763. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl1),
  1764. Ops, "vtbl1");
  1765. case ARM::BI__builtin_neon_vtbl2_v:
  1766. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl2),
  1767. Ops, "vtbl2");
  1768. case ARM::BI__builtin_neon_vtbl3_v:
  1769. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl3),
  1770. Ops, "vtbl3");
  1771. case ARM::BI__builtin_neon_vtbl4_v:
  1772. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbl4),
  1773. Ops, "vtbl4");
  1774. case ARM::BI__builtin_neon_vtbx1_v:
  1775. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx1),
  1776. Ops, "vtbx1");
  1777. case ARM::BI__builtin_neon_vtbx2_v:
  1778. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx2),
  1779. Ops, "vtbx2");
  1780. case ARM::BI__builtin_neon_vtbx3_v:
  1781. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx3),
  1782. Ops, "vtbx3");
  1783. case ARM::BI__builtin_neon_vtbx4_v:
  1784. return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_vtbx4),
  1785. Ops, "vtbx4");
  1786. case ARM::BI__builtin_neon_vtst_v:
  1787. case ARM::BI__builtin_neon_vtstq_v: {
  1788. Ops[0] = Builder.CreateBitCast(Ops[0], Ty);
  1789. Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
  1790. Ops[0] = Builder.CreateAnd(Ops[0], Ops[1]);
  1791. Ops[0] = Builder.CreateICmp(ICmpInst::ICMP_NE, Ops[0],
  1792. ConstantAggregateZero::get(Ty));
  1793. return Builder.CreateSExt(Ops[0], Ty, "vtst");
  1794. }
  1795. case ARM::BI__builtin_neon_vtrn_v:
  1796. case ARM::BI__builtin_neon_vtrnq_v: {
  1797. Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty));
  1798. Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
  1799. Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
  1800. Value *SV = 0;
  1801. for (unsigned vi = 0; vi != 2; ++vi) {
  1802. SmallVector<Constant*, 16> Indices;
  1803. for (unsigned i = 0, e = VTy->getNumElements(); i != e; i += 2) {
  1804. Indices.push_back(Builder.getInt32(i+vi));
  1805. Indices.push_back(Builder.getInt32(i+e+vi));
  1806. }
  1807. Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ops[0], vi);
  1808. SV = llvm::ConstantVector::get(Indices);
  1809. SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vtrn");
  1810. SV = Builder.CreateStore(SV, Addr);
  1811. }
  1812. return SV;
  1813. }
  1814. case ARM::BI__builtin_neon_vuzp_v:
  1815. case ARM::BI__builtin_neon_vuzpq_v: {
  1816. Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty));
  1817. Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
  1818. Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
  1819. Value *SV = 0;
  1820. for (unsigned vi = 0; vi != 2; ++vi) {
  1821. SmallVector<Constant*, 16> Indices;
  1822. for (unsigned i = 0, e = VTy->getNumElements(); i != e; ++i)
  1823. Indices.push_back(ConstantInt::get(Int32Ty, 2*i+vi));
  1824. Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ops[0], vi);
  1825. SV = llvm::ConstantVector::get(Indices);
  1826. SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vuzp");
  1827. SV = Builder.CreateStore(SV, Addr);
  1828. }
  1829. return SV;
  1830. }
  1831. case ARM::BI__builtin_neon_vzip_v:
  1832. case ARM::BI__builtin_neon_vzipq_v: {
  1833. Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty));
  1834. Ops[1] = Builder.CreateBitCast(Ops[1], Ty);
  1835. Ops[2] = Builder.CreateBitCast(Ops[2], Ty);
  1836. Value *SV = 0;
  1837. for (unsigned vi = 0; vi != 2; ++vi) {
  1838. SmallVector<Constant*, 16> Indices;
  1839. for (unsigned i = 0, e = VTy->getNumElements(); i != e; i += 2) {
  1840. Indices.push_back(ConstantInt::get(Int32Ty, (i + vi*e) >> 1));
  1841. Indices.push_back(ConstantInt::get(Int32Ty, ((i + vi*e) >> 1)+e));
  1842. }
  1843. Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ops[0], vi);
  1844. SV = llvm::ConstantVector::get(Indices);
  1845. SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vzip");
  1846. SV = Builder.CreateStore(SV, Addr);
  1847. }
  1848. return SV;
  1849. }
  1850. }
  1851. }
  1852. llvm::Value *CodeGenFunction::
  1853. BuildVector(const SmallVectorImpl<llvm::Value*> &Ops) {
  1854. assert((Ops.size() & (Ops.size() - 1)) == 0 &&
  1855. "Not a power-of-two sized vector!");
  1856. bool AllConstants = true;
  1857. for (unsigned i = 0, e = Ops.size(); i != e && AllConstants; ++i)
  1858. AllConstants &= isa<Constant>(Ops[i]);
  1859. // If this is a constant vector, create a ConstantVector.
  1860. if (AllConstants) {
  1861. SmallVector<llvm::Constant*, 16> CstOps;
  1862. for (unsigned i = 0, e = Ops.size(); i != e; ++i)
  1863. CstOps.push_back(cast<Constant>(Ops[i]));
  1864. return llvm::ConstantVector::get(CstOps);
  1865. }
  1866. // Otherwise, insertelement the values to build the vector.
  1867. Value *Result =
  1868. llvm::UndefValue::get(llvm::VectorType::get(Ops[0]->getType(), Ops.size()));
  1869. for (unsigned i = 0, e = Ops.size(); i != e; ++i)
  1870. Result = Builder.CreateInsertElement(Result, Ops[i], Builder.getInt32(i));
  1871. return Result;
  1872. }
  1873. Value *CodeGenFunction::EmitX86BuiltinExpr(unsigned BuiltinID,
  1874. const CallExpr *E) {
  1875. SmallVector<Value*, 4> Ops;
  1876. // Find out if any arguments are required to be integer constant expressions.
  1877. unsigned ICEArguments = 0;
  1878. ASTContext::GetBuiltinTypeError Error;
  1879. getContext().GetBuiltinType(BuiltinID, Error, &ICEArguments);
  1880. assert(Error == ASTContext::GE_None && "Should not codegen an error");
  1881. for (unsigned i = 0, e = E->getNumArgs(); i != e; i++) {
  1882. // If this is a normal argument, just emit it as a scalar.
  1883. if ((ICEArguments & (1 << i)) == 0) {
  1884. Ops.push_back(EmitScalarExpr(E->getArg(i)));
  1885. continue;
  1886. }
  1887. // If this is required to be a constant, constant fold it so that we know
  1888. // that the generated intrinsic gets a ConstantInt.
  1889. llvm::APSInt Result;
  1890. bool IsConst = E->getArg(i)->isIntegerConstantExpr(Result, getContext());
  1891. assert(IsConst && "Constant arg isn't actually constant?"); (void)IsConst;
  1892. Ops.push_back(llvm::ConstantInt::get(getLLVMContext(), Result));
  1893. }
  1894. switch (BuiltinID) {
  1895. default: return 0;
  1896. case X86::BI__builtin_ia32_vec_init_v8qi:
  1897. case X86::BI__builtin_ia32_vec_init_v4hi:
  1898. case X86::BI__builtin_ia32_vec_init_v2si:
  1899. return Builder.CreateBitCast(BuildVector(Ops),
  1900. llvm::Type::getX86_MMXTy(getLLVMContext()));
  1901. case X86::BI__builtin_ia32_vec_ext_v2si:
  1902. return Builder.CreateExtractElement(Ops[0],
  1903. llvm::ConstantInt::get(Ops[1]->getType(), 0));
  1904. case X86::BI__builtin_ia32_ldmxcsr: {
  1905. llvm::Type *PtrTy = Int8PtrTy;
  1906. Value *One = llvm::ConstantInt::get(Int32Ty, 1);
  1907. Value *Tmp = Builder.CreateAlloca(Int32Ty, One);
  1908. Builder.CreateStore(Ops[0], Tmp);
  1909. return Builder.CreateCall(CGM.getIntrinsic(Intrinsic::x86_sse_ldmxcsr),
  1910. Builder.CreateBitCast(Tmp, PtrTy));
  1911. }
  1912. case X86::BI__builtin_ia32_stmxcsr: {
  1913. llvm::Type *PtrTy = Int8PtrTy;
  1914. Value *One = llvm::ConstantInt::get(Int32Ty, 1);
  1915. Value *Tmp = Builder.CreateAlloca(Int32Ty, One);
  1916. Builder.CreateCall(CGM.getIntrinsic(Intrinsic::x86_sse_stmxcsr),
  1917. Builder.CreateBitCast(Tmp, PtrTy));
  1918. return Builder.CreateLoad(Tmp, "stmxcsr");
  1919. }
  1920. case X86::BI__builtin_ia32_storehps:
  1921. case X86::BI__builtin_ia32_storelps: {
  1922. llvm::Type *PtrTy = llvm::PointerType::getUnqual(Int64Ty);
  1923. llvm::Type *VecTy = llvm::VectorType::get(Int64Ty, 2);
  1924. // cast val v2i64
  1925. Ops[1] = Builder.CreateBitCast(Ops[1], VecTy, "cast");
  1926. // extract (0, 1)
  1927. unsigned Index = BuiltinID == X86::BI__builtin_ia32_storelps ? 0 : 1;
  1928. llvm::Value *Idx = llvm::ConstantInt::get(Int32Ty, Index);
  1929. Ops[1] = Builder.CreateExtractElement(Ops[1], Idx, "extract");
  1930. // cast pointer to i64 & store
  1931. Ops[0] = Builder.CreateBitCast(Ops[0], PtrTy);
  1932. return Builder.CreateStore(Ops[1], Ops[0]);
  1933. }
  1934. case X86::BI__builtin_ia32_palignr: {
  1935. unsigned shiftVal = cast<llvm::ConstantInt>(Ops[2])->getZExtValue();
  1936. // If palignr is shifting the pair of input vectors less than 9 bytes,
  1937. // emit a shuffle instruction.
  1938. if (shiftVal <= 8) {
  1939. SmallVector<llvm::Constant*, 8> Indices;
  1940. for (unsigned i = 0; i != 8; ++i)
  1941. Indices.push_back(llvm::ConstantInt::get(Int32Ty, shiftVal + i));
  1942. Value* SV = llvm::ConstantVector::get(Indices);
  1943. return Builder.CreateShuffleVector(Ops[1], Ops[0], SV, "palignr");
  1944. }
  1945. // If palignr is shifting the pair of input vectors more than 8 but less
  1946. // than 16 bytes, emit a logical right shift of the destination.
  1947. if (shiftVal < 16) {
  1948. // MMX has these as 1 x i64 vectors for some odd optimization reasons.
  1949. llvm::Type *VecTy = llvm::VectorType::get(Int64Ty, 1);
  1950. Ops[0] = Builder.CreateBitCast(Ops[0], VecTy, "cast");
  1951. Ops[1] = llvm::ConstantInt::get(VecTy, (shiftVal-8) * 8);
  1952. // create i32 constant
  1953. llvm::Function *F = CGM.getIntrinsic(Intrinsic::x86_mmx_psrl_q);
  1954. return Builder.CreateCall(F, makeArrayRef(&Ops[0], 2), "palignr");
  1955. }
  1956. // If palignr is shifting the pair of vectors more than 16 bytes, emit zero.
  1957. return llvm::Constant::getNullValue(ConvertType(E->getType()));
  1958. }
  1959. case X86::BI__builtin_ia32_palignr128: {
  1960. unsigned shiftVal = cast<llvm::ConstantInt>(Ops[2])->getZExtValue();
  1961. // If palignr is shifting the pair of input vectors less than 17 bytes,
  1962. // emit a shuffle instruction.
  1963. if (shiftVal <= 16) {
  1964. SmallVector<llvm::Constant*, 16> Indices;
  1965. for (unsigned i = 0; i != 16; ++i)
  1966. Indices.push_back(llvm::ConstantInt::get(Int32Ty, shiftVal + i));
  1967. Value* SV = llvm::ConstantVector::get(Indices);
  1968. return Builder.CreateShuffleVector(Ops[1], Ops[0], SV, "palignr");
  1969. }
  1970. // If palignr is shifting the pair of input vectors more than 16 but less
  1971. // than 32 bytes, emit a logical right shift of the destination.
  1972. if (shiftVal < 32) {
  1973. llvm::Type *VecTy = llvm::VectorType::get(Int64Ty, 2);
  1974. Ops[0] = Builder.CreateBitCast(Ops[0], VecTy, "cast");
  1975. Ops[1] = llvm::ConstantInt::get(Int32Ty, (shiftVal-16) * 8);
  1976. // create i32 constant
  1977. llvm::Function *F = CGM.getIntrinsic(Intrinsic::x86_sse2_psrl_dq);
  1978. return Builder.CreateCall(F, makeArrayRef(&Ops[0], 2), "palignr");
  1979. }
  1980. // If palignr is shifting the pair of vectors more than 32 bytes, emit zero.
  1981. return llvm::Constant::getNullValue(ConvertType(E->getType()));
  1982. }
  1983. case X86::BI__builtin_ia32_palignr256: {
  1984. unsigned shiftVal = cast<llvm::ConstantInt>(Ops[2])->getZExtValue();
  1985. // If palignr is shifting the pair of input vectors less than 17 bytes,
  1986. // emit a shuffle instruction.
  1987. if (shiftVal <= 16) {
  1988. SmallVector<llvm::Constant*, 32> Indices;
  1989. // 256-bit palignr operates on 128-bit lanes so we need to handle that
  1990. for (unsigned l = 0; l != 2; ++l) {
  1991. unsigned LaneStart = l * 16;
  1992. unsigned LaneEnd = (l+1) * 16;
  1993. for (unsigned i = 0; i != 16; ++i) {
  1994. unsigned Idx = shiftVal + i + LaneStart;
  1995. if (Idx >= LaneEnd) Idx += 16; // end of lane, switch operand
  1996. Indices.push_back(llvm::ConstantInt::get(Int32Ty, Idx));
  1997. }
  1998. }
  1999. Value* SV = llvm::ConstantVector::get(Indices);
  2000. return Builder.CreateShuffleVector(Ops[1], Ops[0], SV, "palignr");
  2001. }
  2002. // If palignr is shifting the pair of input vectors more than 16 but less
  2003. // than 32 bytes, emit a logical right shift of the destination.
  2004. if (shiftVal < 32) {
  2005. llvm::Type *VecTy = llvm::VectorType::get(Int64Ty, 4);
  2006. Ops[0] = Builder.CreateBitCast(Ops[0], VecTy, "cast");
  2007. Ops[1] = llvm::ConstantInt::get(Int32Ty, (shiftVal-16) * 8);
  2008. // create i32 constant
  2009. llvm::Function *F = CGM.getIntrinsic(Intrinsic::x86_avx2_psrl_dq);
  2010. return Builder.CreateCall(F, makeArrayRef(&Ops[0], 2), "palignr");
  2011. }
  2012. // If palignr is shifting the pair of vectors more than 32 bytes, emit zero.
  2013. return llvm::Constant::getNullValue(ConvertType(E->getType()));
  2014. }
  2015. case X86::BI__builtin_ia32_movntps:
  2016. case X86::BI__builtin_ia32_movntpd:
  2017. case X86::BI__builtin_ia32_movntdq:
  2018. case X86::BI__builtin_ia32_movnti: {
  2019. llvm::MDNode *Node = llvm::MDNode::get(getLLVMContext(),
  2020. Builder.getInt32(1));
  2021. // Convert the type of the pointer to a pointer to the stored type.
  2022. Value *BC = Builder.CreateBitCast(Ops[0],
  2023. llvm::PointerType::getUnqual(Ops[1]->getType()),
  2024. "cast");
  2025. StoreInst *SI = Builder.CreateStore(Ops[1], BC);
  2026. SI->setMetadata(CGM.getModule().getMDKindID("nontemporal"), Node);
  2027. SI->setAlignment(16);
  2028. return SI;
  2029. }
  2030. // 3DNow!
  2031. case X86::BI__builtin_ia32_pswapdsf:
  2032. case X86::BI__builtin_ia32_pswapdsi: {
  2033. const char *name = 0;
  2034. Intrinsic::ID ID = Intrinsic::not_intrinsic;
  2035. switch(BuiltinID) {
  2036. default: llvm_unreachable("Unsupported intrinsic!");
  2037. case X86::BI__builtin_ia32_pswapdsf:
  2038. case X86::BI__builtin_ia32_pswapdsi:
  2039. name = "pswapd";
  2040. ID = Intrinsic::x86_3dnowa_pswapd;
  2041. break;
  2042. }
  2043. llvm::Function *F = CGM.getIntrinsic(ID);
  2044. return Builder.CreateCall(F, Ops, name);
  2045. }
  2046. }
  2047. }
  2048. Value *CodeGenFunction::EmitHexagonBuiltinExpr(unsigned BuiltinID,
  2049. const CallExpr *E) {
  2050. llvm::SmallVector<Value*, 4> Ops;
  2051. for (unsigned i = 0, e = E->getNumArgs(); i != e; i++)
  2052. Ops.push_back(EmitScalarExpr(E->getArg(i)));
  2053. Intrinsic::ID ID = Intrinsic::not_intrinsic;
  2054. switch (BuiltinID) {
  2055. default: return 0;
  2056. case Hexagon::BI__builtin_HEXAGON_C2_cmpeq:
  2057. ID = Intrinsic::hexagon_C2_cmpeq; break;
  2058. case Hexagon::BI__builtin_HEXAGON_C2_cmpgt:
  2059. ID = Intrinsic::hexagon_C2_cmpgt; break;
  2060. case Hexagon::BI__builtin_HEXAGON_C2_cmpgtu:
  2061. ID = Intrinsic::hexagon_C2_cmpgtu; break;
  2062. case Hexagon::BI__builtin_HEXAGON_C2_cmpeqp:
  2063. ID = Intrinsic::hexagon_C2_cmpeqp; break;
  2064. case Hexagon::BI__builtin_HEXAGON_C2_cmpgtp:
  2065. ID = Intrinsic::hexagon_C2_cmpgtp; break;
  2066. case Hexagon::BI__builtin_HEXAGON_C2_cmpgtup:
  2067. ID = Intrinsic::hexagon_C2_cmpgtup; break;
  2068. case Hexagon::BI__builtin_HEXAGON_C2_bitsset:
  2069. ID = Intrinsic::hexagon_C2_bitsset; break;
  2070. case Hexagon::BI__builtin_HEXAGON_C2_bitsclr:
  2071. ID = Intrinsic::hexagon_C2_bitsclr; break;
  2072. case Hexagon::BI__builtin_HEXAGON_C2_cmpeqi:
  2073. ID = Intrinsic::hexagon_C2_cmpeqi; break;
  2074. case Hexagon::BI__builtin_HEXAGON_C2_cmpgti:
  2075. ID = Intrinsic::hexagon_C2_cmpgti; break;
  2076. case Hexagon::BI__builtin_HEXAGON_C2_cmpgtui:
  2077. ID = Intrinsic::hexagon_C2_cmpgtui; break;
  2078. case Hexagon::BI__builtin_HEXAGON_C2_cmpgei:
  2079. ID = Intrinsic::hexagon_C2_cmpgei; break;
  2080. case Hexagon::BI__builtin_HEXAGON_C2_cmpgeui:
  2081. ID = Intrinsic::hexagon_C2_cmpgeui; break;
  2082. case Hexagon::BI__builtin_HEXAGON_C2_cmplt:
  2083. ID = Intrinsic::hexagon_C2_cmplt; break;
  2084. case Hexagon::BI__builtin_HEXAGON_C2_cmpltu:
  2085. ID = Intrinsic::hexagon_C2_cmpltu; break;
  2086. case Hexagon::BI__builtin_HEXAGON_C2_bitsclri:
  2087. ID = Intrinsic::hexagon_C2_bitsclri; break;
  2088. case Hexagon::BI__builtin_HEXAGON_C2_and:
  2089. ID = Intrinsic::hexagon_C2_and; break;
  2090. case Hexagon::BI__builtin_HEXAGON_C2_or:
  2091. ID = Intrinsic::hexagon_C2_or; break;
  2092. case Hexagon::BI__builtin_HEXAGON_C2_xor:
  2093. ID = Intrinsic::hexagon_C2_xor; break;
  2094. case Hexagon::BI__builtin_HEXAGON_C2_andn:
  2095. ID = Intrinsic::hexagon_C2_andn; break;
  2096. case Hexagon::BI__builtin_HEXAGON_C2_not:
  2097. ID = Intrinsic::hexagon_C2_not; break;
  2098. case Hexagon::BI__builtin_HEXAGON_C2_orn:
  2099. ID = Intrinsic::hexagon_C2_orn; break;
  2100. case Hexagon::BI__builtin_HEXAGON_C2_pxfer_map:
  2101. ID = Intrinsic::hexagon_C2_pxfer_map; break;
  2102. case Hexagon::BI__builtin_HEXAGON_C2_any8:
  2103. ID = Intrinsic::hexagon_C2_any8; break;
  2104. case Hexagon::BI__builtin_HEXAGON_C2_all8:
  2105. ID = Intrinsic::hexagon_C2_all8; break;
  2106. case Hexagon::BI__builtin_HEXAGON_C2_vitpack:
  2107. ID = Intrinsic::hexagon_C2_vitpack; break;
  2108. case Hexagon::BI__builtin_HEXAGON_C2_mux:
  2109. ID = Intrinsic::hexagon_C2_mux; break;
  2110. case Hexagon::BI__builtin_HEXAGON_C2_muxii:
  2111. ID = Intrinsic::hexagon_C2_muxii; break;
  2112. case Hexagon::BI__builtin_HEXAGON_C2_muxir:
  2113. ID = Intrinsic::hexagon_C2_muxir; break;
  2114. case Hexagon::BI__builtin_HEXAGON_C2_muxri:
  2115. ID = Intrinsic::hexagon_C2_muxri; break;
  2116. case Hexagon::BI__builtin_HEXAGON_C2_vmux:
  2117. ID = Intrinsic::hexagon_C2_vmux; break;
  2118. case Hexagon::BI__builtin_HEXAGON_C2_mask:
  2119. ID = Intrinsic::hexagon_C2_mask; break;
  2120. case Hexagon::BI__builtin_HEXAGON_A2_vcmpbeq:
  2121. ID = Intrinsic::hexagon_A2_vcmpbeq; break;
  2122. case Hexagon::BI__builtin_HEXAGON_A2_vcmpbgtu:
  2123. ID = Intrinsic::hexagon_A2_vcmpbgtu; break;
  2124. case Hexagon::BI__builtin_HEXAGON_A2_vcmpheq:
  2125. ID = Intrinsic::hexagon_A2_vcmpheq; break;
  2126. case Hexagon::BI__builtin_HEXAGON_A2_vcmphgt:
  2127. ID = Intrinsic::hexagon_A2_vcmphgt; break;
  2128. case Hexagon::BI__builtin_HEXAGON_A2_vcmphgtu:
  2129. ID = Intrinsic::hexagon_A2_vcmphgtu; break;
  2130. case Hexagon::BI__builtin_HEXAGON_A2_vcmpweq:
  2131. ID = Intrinsic::hexagon_A2_vcmpweq; break;
  2132. case Hexagon::BI__builtin_HEXAGON_A2_vcmpwgt:
  2133. ID = Intrinsic::hexagon_A2_vcmpwgt; break;
  2134. case Hexagon::BI__builtin_HEXAGON_A2_vcmpwgtu:
  2135. ID = Intrinsic::hexagon_A2_vcmpwgtu; break;
  2136. case Hexagon::BI__builtin_HEXAGON_C2_tfrpr:
  2137. ID = Intrinsic::hexagon_C2_tfrpr; break;
  2138. case Hexagon::BI__builtin_HEXAGON_C2_tfrrp:
  2139. ID = Intrinsic::hexagon_C2_tfrrp; break;
  2140. case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_hh_s0:
  2141. ID = Intrinsic::hexagon_M2_mpy_acc_hh_s0; break;
  2142. case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_hh_s1:
  2143. ID = Intrinsic::hexagon_M2_mpy_acc_hh_s1; break;
  2144. case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_hl_s0:
  2145. ID = Intrinsic::hexagon_M2_mpy_acc_hl_s0; break;
  2146. case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_hl_s1:
  2147. ID = Intrinsic::hexagon_M2_mpy_acc_hl_s1; break;
  2148. case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_lh_s0:
  2149. ID = Intrinsic::hexagon_M2_mpy_acc_lh_s0; break;
  2150. case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_lh_s1:
  2151. ID = Intrinsic::hexagon_M2_mpy_acc_lh_s1; break;
  2152. case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_ll_s0:
  2153. ID = Intrinsic::hexagon_M2_mpy_acc_ll_s0; break;
  2154. case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_ll_s1:
  2155. ID = Intrinsic::hexagon_M2_mpy_acc_ll_s1; break;
  2156. case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_hh_s0:
  2157. ID = Intrinsic::hexagon_M2_mpy_nac_hh_s0; break;
  2158. case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_hh_s1:
  2159. ID = Intrinsic::hexagon_M2_mpy_nac_hh_s1; break;
  2160. case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_hl_s0:
  2161. ID = Intrinsic::hexagon_M2_mpy_nac_hl_s0; break;
  2162. case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_hl_s1:
  2163. ID = Intrinsic::hexagon_M2_mpy_nac_hl_s1; break;
  2164. case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_lh_s0:
  2165. ID = Intrinsic::hexagon_M2_mpy_nac_lh_s0; break;
  2166. case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_lh_s1:
  2167. ID = Intrinsic::hexagon_M2_mpy_nac_lh_s1; break;
  2168. case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_ll_s0:
  2169. ID = Intrinsic::hexagon_M2_mpy_nac_ll_s0; break;
  2170. case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_ll_s1:
  2171. ID = Intrinsic::hexagon_M2_mpy_nac_ll_s1; break;
  2172. case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_hh_s0:
  2173. ID = Intrinsic::hexagon_M2_mpy_acc_sat_hh_s0; break;
  2174. case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_hh_s1:
  2175. ID = Intrinsic::hexagon_M2_mpy_acc_sat_hh_s1; break;
  2176. case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_hl_s0:
  2177. ID = Intrinsic::hexagon_M2_mpy_acc_sat_hl_s0; break;
  2178. case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_hl_s1:
  2179. ID = Intrinsic::hexagon_M2_mpy_acc_sat_hl_s1; break;
  2180. case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_lh_s0:
  2181. ID = Intrinsic::hexagon_M2_mpy_acc_sat_lh_s0; break;
  2182. case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_lh_s1:
  2183. ID = Intrinsic::hexagon_M2_mpy_acc_sat_lh_s1; break;
  2184. case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_ll_s0:
  2185. ID = Intrinsic::hexagon_M2_mpy_acc_sat_ll_s0; break;
  2186. case Hexagon::BI__builtin_HEXAGON_M2_mpy_acc_sat_ll_s1:
  2187. ID = Intrinsic::hexagon_M2_mpy_acc_sat_ll_s1; break;
  2188. case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_hh_s0:
  2189. ID = Intrinsic::hexagon_M2_mpy_nac_sat_hh_s0; break;
  2190. case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_hh_s1:
  2191. ID = Intrinsic::hexagon_M2_mpy_nac_sat_hh_s1; break;
  2192. case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_hl_s0:
  2193. ID = Intrinsic::hexagon_M2_mpy_nac_sat_hl_s0; break;
  2194. case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_hl_s1:
  2195. ID = Intrinsic::hexagon_M2_mpy_nac_sat_hl_s1; break;
  2196. case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_lh_s0:
  2197. ID = Intrinsic::hexagon_M2_mpy_nac_sat_lh_s0; break;
  2198. case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_lh_s1:
  2199. ID = Intrinsic::hexagon_M2_mpy_nac_sat_lh_s1; break;
  2200. case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_ll_s0:
  2201. ID = Intrinsic::hexagon_M2_mpy_nac_sat_ll_s0; break;
  2202. case Hexagon::BI__builtin_HEXAGON_M2_mpy_nac_sat_ll_s1:
  2203. ID = Intrinsic::hexagon_M2_mpy_nac_sat_ll_s1; break;
  2204. case Hexagon::BI__builtin_HEXAGON_M2_mpy_hh_s0:
  2205. ID = Intrinsic::hexagon_M2_mpy_hh_s0; break;
  2206. case Hexagon::BI__builtin_HEXAGON_M2_mpy_hh_s1:
  2207. ID = Intrinsic::hexagon_M2_mpy_hh_s1; break;
  2208. case Hexagon::BI__builtin_HEXAGON_M2_mpy_hl_s0:
  2209. ID = Intrinsic::hexagon_M2_mpy_hl_s0; break;
  2210. case Hexagon::BI__builtin_HEXAGON_M2_mpy_hl_s1:
  2211. ID = Intrinsic::hexagon_M2_mpy_hl_s1; break;
  2212. case Hexagon::BI__builtin_HEXAGON_M2_mpy_lh_s0:
  2213. ID = Intrinsic::hexagon_M2_mpy_lh_s0; break;
  2214. case Hexagon::BI__builtin_HEXAGON_M2_mpy_lh_s1:
  2215. ID = Intrinsic::hexagon_M2_mpy_lh_s1; break;
  2216. case Hexagon::BI__builtin_HEXAGON_M2_mpy_ll_s0:
  2217. ID = Intrinsic::hexagon_M2_mpy_ll_s0; break;
  2218. case Hexagon::BI__builtin_HEXAGON_M2_mpy_ll_s1:
  2219. ID = Intrinsic::hexagon_M2_mpy_ll_s1; break;
  2220. case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_hh_s0:
  2221. ID = Intrinsic::hexagon_M2_mpy_sat_hh_s0; break;
  2222. case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_hh_s1:
  2223. ID = Intrinsic::hexagon_M2_mpy_sat_hh_s1; break;
  2224. case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_hl_s0:
  2225. ID = Intrinsic::hexagon_M2_mpy_sat_hl_s0; break;
  2226. case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_hl_s1:
  2227. ID = Intrinsic::hexagon_M2_mpy_sat_hl_s1; break;
  2228. case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_lh_s0:
  2229. ID = Intrinsic::hexagon_M2_mpy_sat_lh_s0; break;
  2230. case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_lh_s1:
  2231. ID = Intrinsic::hexagon_M2_mpy_sat_lh_s1; break;
  2232. case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_ll_s0:
  2233. ID = Intrinsic::hexagon_M2_mpy_sat_ll_s0; break;
  2234. case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_ll_s1:
  2235. ID = Intrinsic::hexagon_M2_mpy_sat_ll_s1; break;
  2236. case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_hh_s0:
  2237. ID = Intrinsic::hexagon_M2_mpy_rnd_hh_s0; break;
  2238. case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_hh_s1:
  2239. ID = Intrinsic::hexagon_M2_mpy_rnd_hh_s1; break;
  2240. case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_hl_s0:
  2241. ID = Intrinsic::hexagon_M2_mpy_rnd_hl_s0; break;
  2242. case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_hl_s1:
  2243. ID = Intrinsic::hexagon_M2_mpy_rnd_hl_s1; break;
  2244. case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_lh_s0:
  2245. ID = Intrinsic::hexagon_M2_mpy_rnd_lh_s0; break;
  2246. case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_lh_s1:
  2247. ID = Intrinsic::hexagon_M2_mpy_rnd_lh_s1; break;
  2248. case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_ll_s0:
  2249. ID = Intrinsic::hexagon_M2_mpy_rnd_ll_s0; break;
  // Hexagon builtin -> LLVM intrinsic ID mapping (continuation of a larger
  // switch). Every case is a mechanical 1:1 name mapping: builtin
  // __builtin_HEXAGON_<name> selects Intrinsic::hexagon_<name>. 'ID' is set
  // here and presumably consumed by shared call-emission code after the
  // switch — that code is outside this excerpt.
  //
  // M2 (multiply unit) builtins: 16x16 multiplies with high/low halfword
  // selection (hh/hl/lh/ll), optional accumulate (acc) / subtract (nac),
  // rounding (rnd), saturation (sat), unsigned (mpyu) and 64-bit-result
  // (mpyd/mpyud) variants, plus complex/vector multiply-accumulate forms.
  2250. case Hexagon::BI__builtin_HEXAGON_M2_mpy_rnd_ll_s1:
  2251. ID = Intrinsic::hexagon_M2_mpy_rnd_ll_s1; break;
  2252. case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_hh_s0:
  2253. ID = Intrinsic::hexagon_M2_mpy_sat_rnd_hh_s0; break;
  2254. case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_hh_s1:
  2255. ID = Intrinsic::hexagon_M2_mpy_sat_rnd_hh_s1; break;
  2256. case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_hl_s0:
  2257. ID = Intrinsic::hexagon_M2_mpy_sat_rnd_hl_s0; break;
  2258. case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_hl_s1:
  2259. ID = Intrinsic::hexagon_M2_mpy_sat_rnd_hl_s1; break;
  2260. case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_lh_s0:
  2261. ID = Intrinsic::hexagon_M2_mpy_sat_rnd_lh_s0; break;
  2262. case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_lh_s1:
  2263. ID = Intrinsic::hexagon_M2_mpy_sat_rnd_lh_s1; break;
  2264. case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_ll_s0:
  2265. ID = Intrinsic::hexagon_M2_mpy_sat_rnd_ll_s0; break;
  2266. case Hexagon::BI__builtin_HEXAGON_M2_mpy_sat_rnd_ll_s1:
  2267. ID = Intrinsic::hexagon_M2_mpy_sat_rnd_ll_s1; break;
  2268. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_hh_s0:
  2269. ID = Intrinsic::hexagon_M2_mpyd_acc_hh_s0; break;
  2270. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_hh_s1:
  2271. ID = Intrinsic::hexagon_M2_mpyd_acc_hh_s1; break;
  2272. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_hl_s0:
  2273. ID = Intrinsic::hexagon_M2_mpyd_acc_hl_s0; break;
  2274. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_hl_s1:
  2275. ID = Intrinsic::hexagon_M2_mpyd_acc_hl_s1; break;
  2276. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_lh_s0:
  2277. ID = Intrinsic::hexagon_M2_mpyd_acc_lh_s0; break;
  2278. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_lh_s1:
  2279. ID = Intrinsic::hexagon_M2_mpyd_acc_lh_s1; break;
  2280. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_ll_s0:
  2281. ID = Intrinsic::hexagon_M2_mpyd_acc_ll_s0; break;
  2282. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_acc_ll_s1:
  2283. ID = Intrinsic::hexagon_M2_mpyd_acc_ll_s1; break;
  2284. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_hh_s0:
  2285. ID = Intrinsic::hexagon_M2_mpyd_nac_hh_s0; break;
  2286. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_hh_s1:
  2287. ID = Intrinsic::hexagon_M2_mpyd_nac_hh_s1; break;
  2288. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_hl_s0:
  2289. ID = Intrinsic::hexagon_M2_mpyd_nac_hl_s0; break;
  2290. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_hl_s1:
  2291. ID = Intrinsic::hexagon_M2_mpyd_nac_hl_s1; break;
  2292. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_lh_s0:
  2293. ID = Intrinsic::hexagon_M2_mpyd_nac_lh_s0; break;
  2294. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_lh_s1:
  2295. ID = Intrinsic::hexagon_M2_mpyd_nac_lh_s1; break;
  2296. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_ll_s0:
  2297. ID = Intrinsic::hexagon_M2_mpyd_nac_ll_s0; break;
  2298. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_nac_ll_s1:
  2299. ID = Intrinsic::hexagon_M2_mpyd_nac_ll_s1; break;
  2300. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_hh_s0:
  2301. ID = Intrinsic::hexagon_M2_mpyd_hh_s0; break;
  2302. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_hh_s1:
  2303. ID = Intrinsic::hexagon_M2_mpyd_hh_s1; break;
  2304. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_hl_s0:
  2305. ID = Intrinsic::hexagon_M2_mpyd_hl_s0; break;
  2306. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_hl_s1:
  2307. ID = Intrinsic::hexagon_M2_mpyd_hl_s1; break;
  2308. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_lh_s0:
  2309. ID = Intrinsic::hexagon_M2_mpyd_lh_s0; break;
  2310. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_lh_s1:
  2311. ID = Intrinsic::hexagon_M2_mpyd_lh_s1; break;
  2312. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_ll_s0:
  2313. ID = Intrinsic::hexagon_M2_mpyd_ll_s0; break;
  2314. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_ll_s1:
  2315. ID = Intrinsic::hexagon_M2_mpyd_ll_s1; break;
  2316. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_hh_s0:
  2317. ID = Intrinsic::hexagon_M2_mpyd_rnd_hh_s0; break;
  2318. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_hh_s1:
  2319. ID = Intrinsic::hexagon_M2_mpyd_rnd_hh_s1; break;
  2320. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_hl_s0:
  2321. ID = Intrinsic::hexagon_M2_mpyd_rnd_hl_s0; break;
  2322. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_hl_s1:
  2323. ID = Intrinsic::hexagon_M2_mpyd_rnd_hl_s1; break;
  2324. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_lh_s0:
  2325. ID = Intrinsic::hexagon_M2_mpyd_rnd_lh_s0; break;
  2326. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_lh_s1:
  2327. ID = Intrinsic::hexagon_M2_mpyd_rnd_lh_s1; break;
  2328. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_ll_s0:
  2329. ID = Intrinsic::hexagon_M2_mpyd_rnd_ll_s0; break;
  2330. case Hexagon::BI__builtin_HEXAGON_M2_mpyd_rnd_ll_s1:
  2331. ID = Intrinsic::hexagon_M2_mpyd_rnd_ll_s1; break;
  2332. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_hh_s0:
  2333. ID = Intrinsic::hexagon_M2_mpyu_acc_hh_s0; break;
  2334. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_hh_s1:
  2335. ID = Intrinsic::hexagon_M2_mpyu_acc_hh_s1; break;
  2336. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_hl_s0:
  2337. ID = Intrinsic::hexagon_M2_mpyu_acc_hl_s0; break;
  2338. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_hl_s1:
  2339. ID = Intrinsic::hexagon_M2_mpyu_acc_hl_s1; break;
  2340. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_lh_s0:
  2341. ID = Intrinsic::hexagon_M2_mpyu_acc_lh_s0; break;
  2342. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_lh_s1:
  2343. ID = Intrinsic::hexagon_M2_mpyu_acc_lh_s1; break;
  2344. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_ll_s0:
  2345. ID = Intrinsic::hexagon_M2_mpyu_acc_ll_s0; break;
  2346. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_acc_ll_s1:
  2347. ID = Intrinsic::hexagon_M2_mpyu_acc_ll_s1; break;
  2348. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_hh_s0:
  2349. ID = Intrinsic::hexagon_M2_mpyu_nac_hh_s0; break;
  2350. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_hh_s1:
  2351. ID = Intrinsic::hexagon_M2_mpyu_nac_hh_s1; break;
  2352. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_hl_s0:
  2353. ID = Intrinsic::hexagon_M2_mpyu_nac_hl_s0; break;
  2354. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_hl_s1:
  2355. ID = Intrinsic::hexagon_M2_mpyu_nac_hl_s1; break;
  2356. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_lh_s0:
  2357. ID = Intrinsic::hexagon_M2_mpyu_nac_lh_s0; break;
  2358. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_lh_s1:
  2359. ID = Intrinsic::hexagon_M2_mpyu_nac_lh_s1; break;
  2360. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_ll_s0:
  2361. ID = Intrinsic::hexagon_M2_mpyu_nac_ll_s0; break;
  2362. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_nac_ll_s1:
  2363. ID = Intrinsic::hexagon_M2_mpyu_nac_ll_s1; break;
  2364. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_hh_s0:
  2365. ID = Intrinsic::hexagon_M2_mpyu_hh_s0; break;
  2366. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_hh_s1:
  2367. ID = Intrinsic::hexagon_M2_mpyu_hh_s1; break;
  2368. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_hl_s0:
  2369. ID = Intrinsic::hexagon_M2_mpyu_hl_s0; break;
  2370. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_hl_s1:
  2371. ID = Intrinsic::hexagon_M2_mpyu_hl_s1; break;
  2372. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_lh_s0:
  2373. ID = Intrinsic::hexagon_M2_mpyu_lh_s0; break;
  2374. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_lh_s1:
  2375. ID = Intrinsic::hexagon_M2_mpyu_lh_s1; break;
  2376. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_ll_s0:
  2377. ID = Intrinsic::hexagon_M2_mpyu_ll_s0; break;
  2378. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_ll_s1:
  2379. ID = Intrinsic::hexagon_M2_mpyu_ll_s1; break;
  2380. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_hh_s0:
  2381. ID = Intrinsic::hexagon_M2_mpyud_acc_hh_s0; break;
  2382. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_hh_s1:
  2383. ID = Intrinsic::hexagon_M2_mpyud_acc_hh_s1; break;
  2384. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_hl_s0:
  2385. ID = Intrinsic::hexagon_M2_mpyud_acc_hl_s0; break;
  2386. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_hl_s1:
  2387. ID = Intrinsic::hexagon_M2_mpyud_acc_hl_s1; break;
  2388. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_lh_s0:
  2389. ID = Intrinsic::hexagon_M2_mpyud_acc_lh_s0; break;
  2390. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_lh_s1:
  2391. ID = Intrinsic::hexagon_M2_mpyud_acc_lh_s1; break;
  2392. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_ll_s0:
  2393. ID = Intrinsic::hexagon_M2_mpyud_acc_ll_s0; break;
  2394. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_acc_ll_s1:
  2395. ID = Intrinsic::hexagon_M2_mpyud_acc_ll_s1; break;
  2396. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_hh_s0:
  2397. ID = Intrinsic::hexagon_M2_mpyud_nac_hh_s0; break;
  2398. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_hh_s1:
  2399. ID = Intrinsic::hexagon_M2_mpyud_nac_hh_s1; break;
  2400. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_hl_s0:
  2401. ID = Intrinsic::hexagon_M2_mpyud_nac_hl_s0; break;
  2402. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_hl_s1:
  2403. ID = Intrinsic::hexagon_M2_mpyud_nac_hl_s1; break;
  2404. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_lh_s0:
  2405. ID = Intrinsic::hexagon_M2_mpyud_nac_lh_s0; break;
  2406. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_lh_s1:
  2407. ID = Intrinsic::hexagon_M2_mpyud_nac_lh_s1; break;
  2408. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_ll_s0:
  2409. ID = Intrinsic::hexagon_M2_mpyud_nac_ll_s0; break;
  2410. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_nac_ll_s1:
  2411. ID = Intrinsic::hexagon_M2_mpyud_nac_ll_s1; break;
  2412. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_hh_s0:
  2413. ID = Intrinsic::hexagon_M2_mpyud_hh_s0; break;
  2414. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_hh_s1:
  2415. ID = Intrinsic::hexagon_M2_mpyud_hh_s1; break;
  2416. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_hl_s0:
  2417. ID = Intrinsic::hexagon_M2_mpyud_hl_s0; break;
  2418. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_hl_s1:
  2419. ID = Intrinsic::hexagon_M2_mpyud_hl_s1; break;
  2420. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_lh_s0:
  2421. ID = Intrinsic::hexagon_M2_mpyud_lh_s0; break;
  2422. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_lh_s1:
  2423. ID = Intrinsic::hexagon_M2_mpyud_lh_s1; break;
  2424. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_ll_s0:
  2425. ID = Intrinsic::hexagon_M2_mpyud_ll_s0; break;
  2426. case Hexagon::BI__builtin_HEXAGON_M2_mpyud_ll_s1:
  2427. ID = Intrinsic::hexagon_M2_mpyud_ll_s1; break;
  2428. case Hexagon::BI__builtin_HEXAGON_M2_mpysmi:
  2429. ID = Intrinsic::hexagon_M2_mpysmi; break;
  2430. case Hexagon::BI__builtin_HEXAGON_M2_macsip:
  2431. ID = Intrinsic::hexagon_M2_macsip; break;
  2432. case Hexagon::BI__builtin_HEXAGON_M2_macsin:
  2433. ID = Intrinsic::hexagon_M2_macsin; break;
  2434. case Hexagon::BI__builtin_HEXAGON_M2_dpmpyss_s0:
  2435. ID = Intrinsic::hexagon_M2_dpmpyss_s0; break;
  2436. case Hexagon::BI__builtin_HEXAGON_M2_dpmpyss_acc_s0:
  2437. ID = Intrinsic::hexagon_M2_dpmpyss_acc_s0; break;
  2438. case Hexagon::BI__builtin_HEXAGON_M2_dpmpyss_nac_s0:
  2439. ID = Intrinsic::hexagon_M2_dpmpyss_nac_s0; break;
  2440. case Hexagon::BI__builtin_HEXAGON_M2_dpmpyuu_s0:
  2441. ID = Intrinsic::hexagon_M2_dpmpyuu_s0; break;
  2442. case Hexagon::BI__builtin_HEXAGON_M2_dpmpyuu_acc_s0:
  2443. ID = Intrinsic::hexagon_M2_dpmpyuu_acc_s0; break;
  2444. case Hexagon::BI__builtin_HEXAGON_M2_dpmpyuu_nac_s0:
  2445. ID = Intrinsic::hexagon_M2_dpmpyuu_nac_s0; break;
  2446. case Hexagon::BI__builtin_HEXAGON_M2_mpy_up:
  2447. ID = Intrinsic::hexagon_M2_mpy_up; break;
  2448. case Hexagon::BI__builtin_HEXAGON_M2_mpyu_up:
  2449. ID = Intrinsic::hexagon_M2_mpyu_up; break;
  2450. case Hexagon::BI__builtin_HEXAGON_M2_dpmpyss_rnd_s0:
  2451. ID = Intrinsic::hexagon_M2_dpmpyss_rnd_s0; break;
  2452. case Hexagon::BI__builtin_HEXAGON_M2_mpyi:
  2453. ID = Intrinsic::hexagon_M2_mpyi; break;
  2454. case Hexagon::BI__builtin_HEXAGON_M2_mpyui:
  2455. ID = Intrinsic::hexagon_M2_mpyui; break;
  2456. case Hexagon::BI__builtin_HEXAGON_M2_maci:
  2457. ID = Intrinsic::hexagon_M2_maci; break;
  2458. case Hexagon::BI__builtin_HEXAGON_M2_acci:
  2459. ID = Intrinsic::hexagon_M2_acci; break;
  2460. case Hexagon::BI__builtin_HEXAGON_M2_accii:
  2461. ID = Intrinsic::hexagon_M2_accii; break;
  2462. case Hexagon::BI__builtin_HEXAGON_M2_nacci:
  2463. ID = Intrinsic::hexagon_M2_nacci; break;
  2464. case Hexagon::BI__builtin_HEXAGON_M2_naccii:
  2465. ID = Intrinsic::hexagon_M2_naccii; break;
  2466. case Hexagon::BI__builtin_HEXAGON_M2_subacc:
  2467. ID = Intrinsic::hexagon_M2_subacc; break;
  2468. case Hexagon::BI__builtin_HEXAGON_M2_vmpy2s_s0:
  2469. ID = Intrinsic::hexagon_M2_vmpy2s_s0; break;
  2470. case Hexagon::BI__builtin_HEXAGON_M2_vmpy2s_s1:
  2471. ID = Intrinsic::hexagon_M2_vmpy2s_s1; break;
  2472. case Hexagon::BI__builtin_HEXAGON_M2_vmac2s_s0:
  2473. ID = Intrinsic::hexagon_M2_vmac2s_s0; break;
  2474. case Hexagon::BI__builtin_HEXAGON_M2_vmac2s_s1:
  2475. ID = Intrinsic::hexagon_M2_vmac2s_s1; break;
  2476. case Hexagon::BI__builtin_HEXAGON_M2_vmpy2s_s0pack:
  2477. ID = Intrinsic::hexagon_M2_vmpy2s_s0pack; break;
  2478. case Hexagon::BI__builtin_HEXAGON_M2_vmpy2s_s1pack:
  2479. ID = Intrinsic::hexagon_M2_vmpy2s_s1pack; break;
  2480. case Hexagon::BI__builtin_HEXAGON_M2_vmac2:
  2481. ID = Intrinsic::hexagon_M2_vmac2; break;
  2482. case Hexagon::BI__builtin_HEXAGON_M2_vmpy2es_s0:
  2483. ID = Intrinsic::hexagon_M2_vmpy2es_s0; break;
  2484. case Hexagon::BI__builtin_HEXAGON_M2_vmpy2es_s1:
  2485. ID = Intrinsic::hexagon_M2_vmpy2es_s1; break;
  2486. case Hexagon::BI__builtin_HEXAGON_M2_vmac2es_s0:
  2487. ID = Intrinsic::hexagon_M2_vmac2es_s0; break;
  2488. case Hexagon::BI__builtin_HEXAGON_M2_vmac2es_s1:
  2489. ID = Intrinsic::hexagon_M2_vmac2es_s1; break;
  2490. case Hexagon::BI__builtin_HEXAGON_M2_vmac2es:
  2491. ID = Intrinsic::hexagon_M2_vmac2es; break;
  2492. case Hexagon::BI__builtin_HEXAGON_M2_vrmac_s0:
  2493. ID = Intrinsic::hexagon_M2_vrmac_s0; break;
  2494. case Hexagon::BI__builtin_HEXAGON_M2_vrmpy_s0:
  2495. ID = Intrinsic::hexagon_M2_vrmpy_s0; break;
  2496. case Hexagon::BI__builtin_HEXAGON_M2_vdmpyrs_s0:
  2497. ID = Intrinsic::hexagon_M2_vdmpyrs_s0; break;
  2498. case Hexagon::BI__builtin_HEXAGON_M2_vdmpyrs_s1:
  2499. ID = Intrinsic::hexagon_M2_vdmpyrs_s1; break;
  2500. case Hexagon::BI__builtin_HEXAGON_M2_vdmacs_s0:
  2501. ID = Intrinsic::hexagon_M2_vdmacs_s0; break;
  2502. case Hexagon::BI__builtin_HEXAGON_M2_vdmacs_s1:
  2503. ID = Intrinsic::hexagon_M2_vdmacs_s1; break;
  2504. case Hexagon::BI__builtin_HEXAGON_M2_vdmpys_s0:
  2505. ID = Intrinsic::hexagon_M2_vdmpys_s0; break;
  2506. case Hexagon::BI__builtin_HEXAGON_M2_vdmpys_s1:
  2507. ID = Intrinsic::hexagon_M2_vdmpys_s1; break;
  2508. case Hexagon::BI__builtin_HEXAGON_M2_cmpyrs_s0:
  2509. ID = Intrinsic::hexagon_M2_cmpyrs_s0; break;
  2510. case Hexagon::BI__builtin_HEXAGON_M2_cmpyrs_s1:
  2511. ID = Intrinsic::hexagon_M2_cmpyrs_s1; break;
  2512. case Hexagon::BI__builtin_HEXAGON_M2_cmpyrsc_s0:
  2513. ID = Intrinsic::hexagon_M2_cmpyrsc_s0; break;
  2514. case Hexagon::BI__builtin_HEXAGON_M2_cmpyrsc_s1:
  2515. ID = Intrinsic::hexagon_M2_cmpyrsc_s1; break;
  2516. case Hexagon::BI__builtin_HEXAGON_M2_cmacs_s0:
  2517. ID = Intrinsic::hexagon_M2_cmacs_s0; break;
  2518. case Hexagon::BI__builtin_HEXAGON_M2_cmacs_s1:
  2519. ID = Intrinsic::hexagon_M2_cmacs_s1; break;
  2520. case Hexagon::BI__builtin_HEXAGON_M2_cmacsc_s0:
  2521. ID = Intrinsic::hexagon_M2_cmacsc_s0; break;
  2522. case Hexagon::BI__builtin_HEXAGON_M2_cmacsc_s1:
  2523. ID = Intrinsic::hexagon_M2_cmacsc_s1; break;
  2524. case Hexagon::BI__builtin_HEXAGON_M2_cmpys_s0:
  2525. ID = Intrinsic::hexagon_M2_cmpys_s0; break;
  2526. case Hexagon::BI__builtin_HEXAGON_M2_cmpys_s1:
  2527. ID = Intrinsic::hexagon_M2_cmpys_s1; break;
  2528. case Hexagon::BI__builtin_HEXAGON_M2_cmpysc_s0:
  2529. ID = Intrinsic::hexagon_M2_cmpysc_s0; break;
  2530. case Hexagon::BI__builtin_HEXAGON_M2_cmpysc_s1:
  2531. ID = Intrinsic::hexagon_M2_cmpysc_s1; break;
  2532. case Hexagon::BI__builtin_HEXAGON_M2_cnacs_s0:
  2533. ID = Intrinsic::hexagon_M2_cnacs_s0; break;
  2534. case Hexagon::BI__builtin_HEXAGON_M2_cnacs_s1:
  2535. ID = Intrinsic::hexagon_M2_cnacs_s1; break;
  2536. case Hexagon::BI__builtin_HEXAGON_M2_cnacsc_s0:
  2537. ID = Intrinsic::hexagon_M2_cnacsc_s0; break;
  2538. case Hexagon::BI__builtin_HEXAGON_M2_cnacsc_s1:
  2539. ID = Intrinsic::hexagon_M2_cnacsc_s1; break;
  2540. case Hexagon::BI__builtin_HEXAGON_M2_vrcmpys_s1:
  2541. ID = Intrinsic::hexagon_M2_vrcmpys_s1; break;
  2542. case Hexagon::BI__builtin_HEXAGON_M2_vrcmpys_acc_s1:
  2543. ID = Intrinsic::hexagon_M2_vrcmpys_acc_s1; break;
  2544. case Hexagon::BI__builtin_HEXAGON_M2_vrcmpys_s1rp:
  2545. ID = Intrinsic::hexagon_M2_vrcmpys_s1rp; break;
  2546. case Hexagon::BI__builtin_HEXAGON_M2_mmacls_s0:
  2547. ID = Intrinsic::hexagon_M2_mmacls_s0; break;
  2548. case Hexagon::BI__builtin_HEXAGON_M2_mmacls_s1:
  2549. ID = Intrinsic::hexagon_M2_mmacls_s1; break;
  2550. case Hexagon::BI__builtin_HEXAGON_M2_mmachs_s0:
  2551. ID = Intrinsic::hexagon_M2_mmachs_s0; break;
  2552. case Hexagon::BI__builtin_HEXAGON_M2_mmachs_s1:
  2553. ID = Intrinsic::hexagon_M2_mmachs_s1; break;
  2554. case Hexagon::BI__builtin_HEXAGON_M2_mmpyl_s0:
  2555. ID = Intrinsic::hexagon_M2_mmpyl_s0; break;
  2556. case Hexagon::BI__builtin_HEXAGON_M2_mmpyl_s1:
  2557. ID = Intrinsic::hexagon_M2_mmpyl_s1; break;
  2558. case Hexagon::BI__builtin_HEXAGON_M2_mmpyh_s0:
  2559. ID = Intrinsic::hexagon_M2_mmpyh_s0; break;
  2560. case Hexagon::BI__builtin_HEXAGON_M2_mmpyh_s1:
  2561. ID = Intrinsic::hexagon_M2_mmpyh_s1; break;
  2562. case Hexagon::BI__builtin_HEXAGON_M2_mmacls_rs0:
  2563. ID = Intrinsic::hexagon_M2_mmacls_rs0; break;
  2564. case Hexagon::BI__builtin_HEXAGON_M2_mmacls_rs1:
  2565. ID = Intrinsic::hexagon_M2_mmacls_rs1; break;
  2566. case Hexagon::BI__builtin_HEXAGON_M2_mmachs_rs0:
  2567. ID = Intrinsic::hexagon_M2_mmachs_rs0; break;
  2568. case Hexagon::BI__builtin_HEXAGON_M2_mmachs_rs1:
  2569. ID = Intrinsic::hexagon_M2_mmachs_rs1; break;
  2570. case Hexagon::BI__builtin_HEXAGON_M2_mmpyl_rs0:
  2571. ID = Intrinsic::hexagon_M2_mmpyl_rs0; break;
  2572. case Hexagon::BI__builtin_HEXAGON_M2_mmpyl_rs1:
  2573. ID = Intrinsic::hexagon_M2_mmpyl_rs1; break;
  2574. case Hexagon::BI__builtin_HEXAGON_M2_mmpyh_rs0:
  2575. ID = Intrinsic::hexagon_M2_mmpyh_rs0; break;
  2576. case Hexagon::BI__builtin_HEXAGON_M2_mmpyh_rs1:
  2577. ID = Intrinsic::hexagon_M2_mmpyh_rs1; break;
  2578. case Hexagon::BI__builtin_HEXAGON_M2_hmmpyl_rs1:
  2579. ID = Intrinsic::hexagon_M2_hmmpyl_rs1; break;
  2580. case Hexagon::BI__builtin_HEXAGON_M2_hmmpyh_rs1:
  2581. ID = Intrinsic::hexagon_M2_hmmpyh_rs1; break;
  2582. case Hexagon::BI__builtin_HEXAGON_M2_mmaculs_s0:
  2583. ID = Intrinsic::hexagon_M2_mmaculs_s0; break;
  2584. case Hexagon::BI__builtin_HEXAGON_M2_mmaculs_s1:
  2585. ID = Intrinsic::hexagon_M2_mmaculs_s1; break;
  2586. case Hexagon::BI__builtin_HEXAGON_M2_mmacuhs_s0:
  2587. ID = Intrinsic::hexagon_M2_mmacuhs_s0; break;
  2588. case Hexagon::BI__builtin_HEXAGON_M2_mmacuhs_s1:
  2589. ID = Intrinsic::hexagon_M2_mmacuhs_s1; break;
  2590. case Hexagon::BI__builtin_HEXAGON_M2_mmpyul_s0:
  2591. ID = Intrinsic::hexagon_M2_mmpyul_s0; break;
  2592. case Hexagon::BI__builtin_HEXAGON_M2_mmpyul_s1:
  2593. ID = Intrinsic::hexagon_M2_mmpyul_s1; break;
  2594. case Hexagon::BI__builtin_HEXAGON_M2_mmpyuh_s0:
  2595. ID = Intrinsic::hexagon_M2_mmpyuh_s0; break;
  2596. case Hexagon::BI__builtin_HEXAGON_M2_mmpyuh_s1:
  2597. ID = Intrinsic::hexagon_M2_mmpyuh_s1; break;
  2598. case Hexagon::BI__builtin_HEXAGON_M2_mmaculs_rs0:
  2599. ID = Intrinsic::hexagon_M2_mmaculs_rs0; break;
  2600. case Hexagon::BI__builtin_HEXAGON_M2_mmaculs_rs1:
  2601. ID = Intrinsic::hexagon_M2_mmaculs_rs1; break;
  2602. case Hexagon::BI__builtin_HEXAGON_M2_mmacuhs_rs0:
  2603. ID = Intrinsic::hexagon_M2_mmacuhs_rs0; break;
  2604. case Hexagon::BI__builtin_HEXAGON_M2_mmacuhs_rs1:
  2605. ID = Intrinsic::hexagon_M2_mmacuhs_rs1; break;
  2606. case Hexagon::BI__builtin_HEXAGON_M2_mmpyul_rs0:
  2607. ID = Intrinsic::hexagon_M2_mmpyul_rs0; break;
  2608. case Hexagon::BI__builtin_HEXAGON_M2_mmpyul_rs1:
  2609. ID = Intrinsic::hexagon_M2_mmpyul_rs1; break;
  2610. case Hexagon::BI__builtin_HEXAGON_M2_mmpyuh_rs0:
  2611. ID = Intrinsic::hexagon_M2_mmpyuh_rs0; break;
  2612. case Hexagon::BI__builtin_HEXAGON_M2_mmpyuh_rs1:
  2613. ID = Intrinsic::hexagon_M2_mmpyuh_rs1; break;
  2614. case Hexagon::BI__builtin_HEXAGON_M2_vrcmaci_s0:
  2615. ID = Intrinsic::hexagon_M2_vrcmaci_s0; break;
  2616. case Hexagon::BI__builtin_HEXAGON_M2_vrcmacr_s0:
  2617. ID = Intrinsic::hexagon_M2_vrcmacr_s0; break;
  2618. case Hexagon::BI__builtin_HEXAGON_M2_vrcmaci_s0c:
  2619. ID = Intrinsic::hexagon_M2_vrcmaci_s0c; break;
  2620. case Hexagon::BI__builtin_HEXAGON_M2_vrcmacr_s0c:
  2621. ID = Intrinsic::hexagon_M2_vrcmacr_s0c; break;
  2622. case Hexagon::BI__builtin_HEXAGON_M2_cmaci_s0:
  2623. ID = Intrinsic::hexagon_M2_cmaci_s0; break;
  2624. case Hexagon::BI__builtin_HEXAGON_M2_cmacr_s0:
  2625. ID = Intrinsic::hexagon_M2_cmacr_s0; break;
  2626. case Hexagon::BI__builtin_HEXAGON_M2_vrcmpyi_s0:
  2627. ID = Intrinsic::hexagon_M2_vrcmpyi_s0; break;
  2628. case Hexagon::BI__builtin_HEXAGON_M2_vrcmpyr_s0:
  2629. ID = Intrinsic::hexagon_M2_vrcmpyr_s0; break;
  2630. case Hexagon::BI__builtin_HEXAGON_M2_vrcmpyi_s0c:
  2631. ID = Intrinsic::hexagon_M2_vrcmpyi_s0c; break;
  2632. case Hexagon::BI__builtin_HEXAGON_M2_vrcmpyr_s0c:
  2633. ID = Intrinsic::hexagon_M2_vrcmpyr_s0c; break;
  2634. case Hexagon::BI__builtin_HEXAGON_M2_cmpyi_s0:
  2635. ID = Intrinsic::hexagon_M2_cmpyi_s0; break;
  2636. case Hexagon::BI__builtin_HEXAGON_M2_cmpyr_s0:
  2637. ID = Intrinsic::hexagon_M2_cmpyr_s0; break;
  2638. case Hexagon::BI__builtin_HEXAGON_M2_vcmpy_s0_sat_i:
  2639. ID = Intrinsic::hexagon_M2_vcmpy_s0_sat_i; break;
  2640. case Hexagon::BI__builtin_HEXAGON_M2_vcmpy_s0_sat_r:
  2641. ID = Intrinsic::hexagon_M2_vcmpy_s0_sat_r; break;
  2642. case Hexagon::BI__builtin_HEXAGON_M2_vcmpy_s1_sat_i:
  2643. ID = Intrinsic::hexagon_M2_vcmpy_s1_sat_i; break;
  2644. case Hexagon::BI__builtin_HEXAGON_M2_vcmpy_s1_sat_r:
  2645. ID = Intrinsic::hexagon_M2_vcmpy_s1_sat_r; break;
  2646. case Hexagon::BI__builtin_HEXAGON_M2_vcmac_s0_sat_i:
  2647. ID = Intrinsic::hexagon_M2_vcmac_s0_sat_i; break;
  2648. case Hexagon::BI__builtin_HEXAGON_M2_vcmac_s0_sat_r:
  2649. ID = Intrinsic::hexagon_M2_vcmac_s0_sat_r; break;
  2650. case Hexagon::BI__builtin_HEXAGON_S2_vcrotate:
  2651. ID = Intrinsic::hexagon_S2_vcrotate; break;
  // A2 (ALU) builtins: scalar and 64-bit add/sub (plain, saturating,
  // immediate, halfword-combining), shifts, transfers, sign/zero extends,
  // logical ops, saturation, and packed vector add/sub/avg/abs forms.
  2652. case Hexagon::BI__builtin_HEXAGON_A2_add:
  2653. ID = Intrinsic::hexagon_A2_add; break;
  2654. case Hexagon::BI__builtin_HEXAGON_A2_sub:
  2655. ID = Intrinsic::hexagon_A2_sub; break;
  2656. case Hexagon::BI__builtin_HEXAGON_A2_addsat:
  2657. ID = Intrinsic::hexagon_A2_addsat; break;
  2658. case Hexagon::BI__builtin_HEXAGON_A2_subsat:
  2659. ID = Intrinsic::hexagon_A2_subsat; break;
  2660. case Hexagon::BI__builtin_HEXAGON_A2_addi:
  2661. ID = Intrinsic::hexagon_A2_addi; break;
  2662. case Hexagon::BI__builtin_HEXAGON_A2_addh_l16_ll:
  2663. ID = Intrinsic::hexagon_A2_addh_l16_ll; break;
  2664. case Hexagon::BI__builtin_HEXAGON_A2_addh_l16_hl:
  2665. ID = Intrinsic::hexagon_A2_addh_l16_hl; break;
  2666. case Hexagon::BI__builtin_HEXAGON_A2_addh_l16_sat_ll:
  2667. ID = Intrinsic::hexagon_A2_addh_l16_sat_ll; break;
  2668. case Hexagon::BI__builtin_HEXAGON_A2_addh_l16_sat_hl:
  2669. ID = Intrinsic::hexagon_A2_addh_l16_sat_hl; break;
  2670. case Hexagon::BI__builtin_HEXAGON_A2_subh_l16_ll:
  2671. ID = Intrinsic::hexagon_A2_subh_l16_ll; break;
  2672. case Hexagon::BI__builtin_HEXAGON_A2_subh_l16_hl:
  2673. ID = Intrinsic::hexagon_A2_subh_l16_hl; break;
  2674. case Hexagon::BI__builtin_HEXAGON_A2_subh_l16_sat_ll:
  2675. ID = Intrinsic::hexagon_A2_subh_l16_sat_ll; break;
  2676. case Hexagon::BI__builtin_HEXAGON_A2_subh_l16_sat_hl:
  2677. ID = Intrinsic::hexagon_A2_subh_l16_sat_hl; break;
  2678. case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_ll:
  2679. ID = Intrinsic::hexagon_A2_addh_h16_ll; break;
  2680. case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_lh:
  2681. ID = Intrinsic::hexagon_A2_addh_h16_lh; break;
  2682. case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_hl:
  2683. ID = Intrinsic::hexagon_A2_addh_h16_hl; break;
  2684. case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_hh:
  2685. ID = Intrinsic::hexagon_A2_addh_h16_hh; break;
  2686. case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_sat_ll:
  2687. ID = Intrinsic::hexagon_A2_addh_h16_sat_ll; break;
  2688. case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_sat_lh:
  2689. ID = Intrinsic::hexagon_A2_addh_h16_sat_lh; break;
  2690. case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_sat_hl:
  2691. ID = Intrinsic::hexagon_A2_addh_h16_sat_hl; break;
  2692. case Hexagon::BI__builtin_HEXAGON_A2_addh_h16_sat_hh:
  2693. ID = Intrinsic::hexagon_A2_addh_h16_sat_hh; break;
  2694. case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_ll:
  2695. ID = Intrinsic::hexagon_A2_subh_h16_ll; break;
  2696. case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_lh:
  2697. ID = Intrinsic::hexagon_A2_subh_h16_lh; break;
  2698. case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_hl:
  2699. ID = Intrinsic::hexagon_A2_subh_h16_hl; break;
  2700. case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_hh:
  2701. ID = Intrinsic::hexagon_A2_subh_h16_hh; break;
  2702. case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_sat_ll:
  2703. ID = Intrinsic::hexagon_A2_subh_h16_sat_ll; break;
  2704. case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_sat_lh:
  2705. ID = Intrinsic::hexagon_A2_subh_h16_sat_lh; break;
  2706. case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_sat_hl:
  2707. ID = Intrinsic::hexagon_A2_subh_h16_sat_hl; break;
  2708. case Hexagon::BI__builtin_HEXAGON_A2_subh_h16_sat_hh:
  2709. ID = Intrinsic::hexagon_A2_subh_h16_sat_hh; break;
  2710. case Hexagon::BI__builtin_HEXAGON_A2_aslh:
  2711. ID = Intrinsic::hexagon_A2_aslh; break;
  2712. case Hexagon::BI__builtin_HEXAGON_A2_asrh:
  2713. ID = Intrinsic::hexagon_A2_asrh; break;
  2714. case Hexagon::BI__builtin_HEXAGON_A2_addp:
  2715. ID = Intrinsic::hexagon_A2_addp; break;
  2716. case Hexagon::BI__builtin_HEXAGON_A2_addpsat:
  2717. ID = Intrinsic::hexagon_A2_addpsat; break;
  2718. case Hexagon::BI__builtin_HEXAGON_A2_addsp:
  2719. ID = Intrinsic::hexagon_A2_addsp; break;
  2720. case Hexagon::BI__builtin_HEXAGON_A2_subp:
  2721. ID = Intrinsic::hexagon_A2_subp; break;
  2722. case Hexagon::BI__builtin_HEXAGON_A2_neg:
  2723. ID = Intrinsic::hexagon_A2_neg; break;
  2724. case Hexagon::BI__builtin_HEXAGON_A2_negsat:
  2725. ID = Intrinsic::hexagon_A2_negsat; break;
  2726. case Hexagon::BI__builtin_HEXAGON_A2_abs:
  2727. ID = Intrinsic::hexagon_A2_abs; break;
  2728. case Hexagon::BI__builtin_HEXAGON_A2_abssat:
  2729. ID = Intrinsic::hexagon_A2_abssat; break;
  2730. case Hexagon::BI__builtin_HEXAGON_A2_vconj:
  2731. ID = Intrinsic::hexagon_A2_vconj; break;
  2732. case Hexagon::BI__builtin_HEXAGON_A2_negp:
  2733. ID = Intrinsic::hexagon_A2_negp; break;
  2734. case Hexagon::BI__builtin_HEXAGON_A2_absp:
  2735. ID = Intrinsic::hexagon_A2_absp; break;
  2736. case Hexagon::BI__builtin_HEXAGON_A2_max:
  2737. ID = Intrinsic::hexagon_A2_max; break;
  2738. case Hexagon::BI__builtin_HEXAGON_A2_maxu:
  2739. ID = Intrinsic::hexagon_A2_maxu; break;
  2740. case Hexagon::BI__builtin_HEXAGON_A2_min:
  2741. ID = Intrinsic::hexagon_A2_min; break;
  2742. case Hexagon::BI__builtin_HEXAGON_A2_minu:
  2743. ID = Intrinsic::hexagon_A2_minu; break;
  2744. case Hexagon::BI__builtin_HEXAGON_A2_maxp:
  2745. ID = Intrinsic::hexagon_A2_maxp; break;
  2746. case Hexagon::BI__builtin_HEXAGON_A2_maxup:
  2747. ID = Intrinsic::hexagon_A2_maxup; break;
  2748. case Hexagon::BI__builtin_HEXAGON_A2_minp:
  2749. ID = Intrinsic::hexagon_A2_minp; break;
  2750. case Hexagon::BI__builtin_HEXAGON_A2_minup:
  2751. ID = Intrinsic::hexagon_A2_minup; break;
  2752. case Hexagon::BI__builtin_HEXAGON_A2_tfr:
  2753. ID = Intrinsic::hexagon_A2_tfr; break;
  2754. case Hexagon::BI__builtin_HEXAGON_A2_tfrsi:
  2755. ID = Intrinsic::hexagon_A2_tfrsi; break;
  2756. case Hexagon::BI__builtin_HEXAGON_A2_tfrp:
  2757. ID = Intrinsic::hexagon_A2_tfrp; break;
  2758. case Hexagon::BI__builtin_HEXAGON_A2_tfrpi:
  2759. ID = Intrinsic::hexagon_A2_tfrpi; break;
  2760. case Hexagon::BI__builtin_HEXAGON_A2_zxtb:
  2761. ID = Intrinsic::hexagon_A2_zxtb; break;
  2762. case Hexagon::BI__builtin_HEXAGON_A2_sxtb:
  2763. ID = Intrinsic::hexagon_A2_sxtb; break;
  2764. case Hexagon::BI__builtin_HEXAGON_A2_zxth:
  2765. ID = Intrinsic::hexagon_A2_zxth; break;
  2766. case Hexagon::BI__builtin_HEXAGON_A2_sxth:
  2767. ID = Intrinsic::hexagon_A2_sxth; break;
  2768. case Hexagon::BI__builtin_HEXAGON_A2_combinew:
  2769. ID = Intrinsic::hexagon_A2_combinew; break;
  2770. case Hexagon::BI__builtin_HEXAGON_A2_combineii:
  2771. ID = Intrinsic::hexagon_A2_combineii; break;
  2772. case Hexagon::BI__builtin_HEXAGON_A2_combine_hh:
  2773. ID = Intrinsic::hexagon_A2_combine_hh; break;
  2774. case Hexagon::BI__builtin_HEXAGON_A2_combine_hl:
  2775. ID = Intrinsic::hexagon_A2_combine_hl; break;
  2776. case Hexagon::BI__builtin_HEXAGON_A2_combine_lh:
  2777. ID = Intrinsic::hexagon_A2_combine_lh; break;
  2778. case Hexagon::BI__builtin_HEXAGON_A2_combine_ll:
  2779. ID = Intrinsic::hexagon_A2_combine_ll; break;
  2780. case Hexagon::BI__builtin_HEXAGON_A2_tfril:
  2781. ID = Intrinsic::hexagon_A2_tfril; break;
  2782. case Hexagon::BI__builtin_HEXAGON_A2_tfrih:
  2783. ID = Intrinsic::hexagon_A2_tfrih; break;
  2784. case Hexagon::BI__builtin_HEXAGON_A2_and:
  2785. ID = Intrinsic::hexagon_A2_and; break;
  2786. case Hexagon::BI__builtin_HEXAGON_A2_or:
  2787. ID = Intrinsic::hexagon_A2_or; break;
  2788. case Hexagon::BI__builtin_HEXAGON_A2_xor:
  2789. ID = Intrinsic::hexagon_A2_xor; break;
  2790. case Hexagon::BI__builtin_HEXAGON_A2_not:
  2791. ID = Intrinsic::hexagon_A2_not; break;
  2792. case Hexagon::BI__builtin_HEXAGON_M2_xor_xacc:
  2793. ID = Intrinsic::hexagon_M2_xor_xacc; break;
  2794. case Hexagon::BI__builtin_HEXAGON_A2_subri:
  2795. ID = Intrinsic::hexagon_A2_subri; break;
  2796. case Hexagon::BI__builtin_HEXAGON_A2_andir:
  2797. ID = Intrinsic::hexagon_A2_andir; break;
  2798. case Hexagon::BI__builtin_HEXAGON_A2_orir:
  2799. ID = Intrinsic::hexagon_A2_orir; break;
  2800. case Hexagon::BI__builtin_HEXAGON_A2_andp:
  2801. ID = Intrinsic::hexagon_A2_andp; break;
  2802. case Hexagon::BI__builtin_HEXAGON_A2_orp:
  2803. ID = Intrinsic::hexagon_A2_orp; break;
  2804. case Hexagon::BI__builtin_HEXAGON_A2_xorp:
  2805. ID = Intrinsic::hexagon_A2_xorp; break;
  2806. case Hexagon::BI__builtin_HEXAGON_A2_notp:
  2807. ID = Intrinsic::hexagon_A2_notp; break;
  2808. case Hexagon::BI__builtin_HEXAGON_A2_sxtw:
  2809. ID = Intrinsic::hexagon_A2_sxtw; break;
  2810. case Hexagon::BI__builtin_HEXAGON_A2_sat:
  2811. ID = Intrinsic::hexagon_A2_sat; break;
  2812. case Hexagon::BI__builtin_HEXAGON_A2_sath:
  2813. ID = Intrinsic::hexagon_A2_sath; break;
  2814. case Hexagon::BI__builtin_HEXAGON_A2_satuh:
  2815. ID = Intrinsic::hexagon_A2_satuh; break;
  2816. case Hexagon::BI__builtin_HEXAGON_A2_satub:
  2817. ID = Intrinsic::hexagon_A2_satub; break;
  2818. case Hexagon::BI__builtin_HEXAGON_A2_satb:
  2819. ID = Intrinsic::hexagon_A2_satb; break;
  2820. case Hexagon::BI__builtin_HEXAGON_A2_vaddub:
  2821. ID = Intrinsic::hexagon_A2_vaddub; break;
  2822. case Hexagon::BI__builtin_HEXAGON_A2_vaddubs:
  2823. ID = Intrinsic::hexagon_A2_vaddubs; break;
  2824. case Hexagon::BI__builtin_HEXAGON_A2_vaddh:
  2825. ID = Intrinsic::hexagon_A2_vaddh; break;
  2826. case Hexagon::BI__builtin_HEXAGON_A2_vaddhs:
  2827. ID = Intrinsic::hexagon_A2_vaddhs; break;
  2828. case Hexagon::BI__builtin_HEXAGON_A2_vadduhs:
  2829. ID = Intrinsic::hexagon_A2_vadduhs; break;
  2830. case Hexagon::BI__builtin_HEXAGON_A2_vaddw:
  2831. ID = Intrinsic::hexagon_A2_vaddw; break;
  2832. case Hexagon::BI__builtin_HEXAGON_A2_vaddws:
  2833. ID = Intrinsic::hexagon_A2_vaddws; break;
  2834. case Hexagon::BI__builtin_HEXAGON_A2_svavgh:
  2835. ID = Intrinsic::hexagon_A2_svavgh; break;
  2836. case Hexagon::BI__builtin_HEXAGON_A2_svavghs:
  2837. ID = Intrinsic::hexagon_A2_svavghs; break;
  2838. case Hexagon::BI__builtin_HEXAGON_A2_svnavgh:
  2839. ID = Intrinsic::hexagon_A2_svnavgh; break;
  2840. case Hexagon::BI__builtin_HEXAGON_A2_svaddh:
  2841. ID = Intrinsic::hexagon_A2_svaddh; break;
  2842. case Hexagon::BI__builtin_HEXAGON_A2_svaddhs:
  2843. ID = Intrinsic::hexagon_A2_svaddhs; break;
  2844. case Hexagon::BI__builtin_HEXAGON_A2_svadduhs:
  2845. ID = Intrinsic::hexagon_A2_svadduhs; break;
  2846. case Hexagon::BI__builtin_HEXAGON_A2_svsubh:
  2847. ID = Intrinsic::hexagon_A2_svsubh; break;
  2848. case Hexagon::BI__builtin_HEXAGON_A2_svsubhs:
  2849. ID = Intrinsic::hexagon_A2_svsubhs; break;
  2850. case Hexagon::BI__builtin_HEXAGON_A2_svsubuhs:
  2851. ID = Intrinsic::hexagon_A2_svsubuhs; break;
  2852. case Hexagon::BI__builtin_HEXAGON_A2_vraddub:
  2853. ID = Intrinsic::hexagon_A2_vraddub; break;
  2854. case Hexagon::BI__builtin_HEXAGON_A2_vraddub_acc:
  2855. ID = Intrinsic::hexagon_A2_vraddub_acc; break;
  2856. case Hexagon::BI__builtin_HEXAGON_M2_vradduh:
  2857. ID = Intrinsic::hexagon_M2_vradduh; break;
  2858. case Hexagon::BI__builtin_HEXAGON_A2_vsubub:
  2859. ID = Intrinsic::hexagon_A2_vsubub; break;
  2860. case Hexagon::BI__builtin_HEXAGON_A2_vsububs:
  2861. ID = Intrinsic::hexagon_A2_vsububs; break;
  2862. case Hexagon::BI__builtin_HEXAGON_A2_vsubh:
  2863. ID = Intrinsic::hexagon_A2_vsubh; break;
  2864. case Hexagon::BI__builtin_HEXAGON_A2_vsubhs:
  2865. ID = Intrinsic::hexagon_A2_vsubhs; break;
  2866. case Hexagon::BI__builtin_HEXAGON_A2_vsubuhs:
  2867. ID = Intrinsic::hexagon_A2_vsubuhs; break;
  2868. case Hexagon::BI__builtin_HEXAGON_A2_vsubw:
  2869. ID = Intrinsic::hexagon_A2_vsubw; break;
  2870. case Hexagon::BI__builtin_HEXAGON_A2_vsubws:
  2871. ID = Intrinsic::hexagon_A2_vsubws; break;
  2872. case Hexagon::BI__builtin_HEXAGON_A2_vabsh:
  2873. ID = Intrinsic::hexagon_A2_vabsh; break;
  2874. case Hexagon::BI__builtin_HEXAGON_A2_vabshsat:
  2875. ID = Intrinsic::hexagon_A2_vabshsat; break;
  2876. case Hexagon::BI__builtin_HEXAGON_A2_vabsw:
  2877. ID = Intrinsic::hexagon_A2_vabsw; break;
  2878. case Hexagon::BI__builtin_HEXAGON_A2_vabswsat:
  2879. ID = Intrinsic::hexagon_A2_vabswsat; break;
  2880. case Hexagon::BI__builtin_HEXAGON_M2_vabsdiffw:
  2881. ID = Intrinsic::hexagon_M2_vabsdiffw; break;
  2882. case Hexagon::BI__builtin_HEXAGON_M2_vabsdiffh:
  2883. ID = Intrinsic::hexagon_M2_vabsdiffh; break;
  2884. case Hexagon::BI__builtin_HEXAGON_A2_vrsadub:
  2885. ID = Intrinsic::hexagon_A2_vrsadub; break;
  2886. case Hexagon::BI__builtin_HEXAGON_A2_vrsadub_acc:
  2887. ID = Intrinsic::hexagon_A2_vrsadub_acc; break;
  2888. case Hexagon::BI__builtin_HEXAGON_A2_vavgub:
  2889. ID = Intrinsic::hexagon_A2_vavgub; break;
  2890. case Hexagon::BI__builtin_HEXAGON_A2_vavguh:
  2891. ID = Intrinsic::hexagon_A2_vavguh; break;
  2892. case Hexagon::BI__builtin_HEXAGON_A2_vavgh:
  2893. ID = Intrinsic::hexagon_A2_vavgh; break;
  2894. case Hexagon::BI__builtin_HEXAGON_A2_vnavgh:
  2895. ID = Intrinsic::hexagon_A2_vnavgh; break;
  2896. case Hexagon::BI__builtin_HEXAGON_A2_vavgw:
  2897. ID = Intrinsic::hexagon_A2_vavgw; break;
  2898. case Hexagon::BI__builtin_HEXAGON_A2_vnavgw:
  2899. ID = Intrinsic::hexagon_A2_vnavgw; break;
  2900. case Hexagon::BI__builtin_HEXAGON_A2_vavgwr:
  2901. ID = Intrinsic::hexagon_A2_vavgwr; break;
  2902. case Hexagon::BI__builtin_HEXAGON_A2_vnavgwr:
  2903. ID = Intrinsic::hexagon_A2_vnavgwr; break;
  2904. case Hexagon::BI__builtin_HEXAGON_A2_vavgwcr:
  2905. ID = Intrinsic::hexagon_A2_vavgwcr; break;
  2906. case Hexagon::BI__builtin_HEXAGON_A2_vnavgwcr:
  2907. ID = Intrinsic::hexagon_A2_vnavgwcr; break;
  2908. case Hexagon::BI__builtin_HEXAGON_A2_vavghcr:
  2909. ID = Intrinsic::hexagon_A2_vavghcr; break;
  2910. case Hexagon::BI__builtin_HEXAGON_A2_vnavghcr:
  2911. ID = Intrinsic::hexagon_A2_vnavghcr; break;
  2912. case Hexagon::BI__builtin_HEXAGON_A2_vavguw:
  2913. ID = Intrinsic::hexagon_A2_vavguw; break;
  2914. case Hexagon::BI__builtin_HEXAGON_A2_vavguwr:
  2915. ID = Intrinsic::hexagon_A2_vavguwr; break;
  2916. case Hexagon::BI__builtin_HEXAGON_A2_vavgubr:
  2917. ID = Intrinsic::hexagon_A2_vavgubr; break;
  2918. case Hexagon::BI__builtin_HEXAGON_A2_vavguhr:
  2919. ID = Intrinsic::hexagon_A2_vavguhr; break;
  2920. case Hexagon::BI__builtin_HEXAGON_A2_vavghr:
  2921. ID = Intrinsic::hexagon_A2_vavghr; break;
  2922. case Hexagon::BI__builtin_HEXAGON_A2_vnavghr:
  2923. ID = Intrinsic::hexagon_A2_vnavghr; break;
  2924. case Hexagon::BI__builtin_HEXAGON_A2_vminh:
  2925. ID = Intrinsic::hexagon_A2_vminh; break;
  2926. case Hexagon::BI__builtin_HEXAGON_A2_vmaxh:
  2927. ID = Intrinsic::hexagon_A2_vmaxh; break;
  2928. case Hexagon::BI__builtin_HEXAGON_A2_vminub:
  2929. ID = Intrinsic::hexagon_A2_vminub; break;
  2930. case Hexagon::BI__builtin_HEXAGON_A2_vmaxub:
  2931. ID = Intrinsic::hexagon_A2_vmaxub; break;
  2932. case Hexagon::BI__builtin_HEXAGON_A2_vminuh:
  2933. ID = Intrinsic::hexagon_A2_vminuh; break;
  2934. case Hexagon::BI__builtin_HEXAGON_A2_vmaxuh:
  2935. ID = Intrinsic::hexagon_A2_vmaxuh; break;
  2936. case Hexagon::BI__builtin_HEXAGON_A2_vminw:
  2937. ID = Intrinsic::hexagon_A2_vminw; break;
  2938. case Hexagon::BI__builtin_HEXAGON_A2_vmaxw:
  2939. ID = Intrinsic::hexagon_A2_vmaxw; break;
  2940. case Hexagon::BI__builtin_HEXAGON_A2_vminuw:
  2941. ID = Intrinsic::hexagon_A2_vminuw; break;
  2942. case Hexagon::BI__builtin_HEXAGON_A2_vmaxuw:
  2943. ID = Intrinsic::hexagon_A2_vmaxuw; break;
  2944. case Hexagon::BI__builtin_HEXAGON_S2_asr_r_r:
  2945. ID = Intrinsic::hexagon_S2_asr_r_r; break;
  2946. case Hexagon::BI__builtin_HEXAGON_S2_asl_r_r:
  2947. ID = Intrinsic::hexagon_S2_asl_r_r; break;
  2948. case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_r:
  2949. ID = Intrinsic::hexagon_S2_lsr_r_r; break;
  2950. case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_r:
  2951. ID = Intrinsic::hexagon_S2_lsl_r_r; break;
  2952. case Hexagon::BI__builtin_HEXAGON_S2_asr_r_p:
  2953. ID = Intrinsic::hexagon_S2_asr_r_p; break;
  2954. case Hexagon::BI__builtin_HEXAGON_S2_asl_r_p:
  2955. ID = Intrinsic::hexagon_S2_asl_r_p; break;
  2956. case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_p:
  2957. ID = Intrinsic::hexagon_S2_lsr_r_p; break;
  2958. case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_p:
  2959. ID = Intrinsic::hexagon_S2_lsl_r_p; break;
  2960. case Hexagon::BI__builtin_HEXAGON_S2_asr_r_r_acc:
  2961. ID = Intrinsic::hexagon_S2_asr_r_r_acc; break;
  2962. case Hexagon::BI__builtin_HEXAGON_S2_asl_r_r_acc:
  2963. ID = Intrinsic::hexagon_S2_asl_r_r_acc; break;
  2964. case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_r_acc:
  2965. ID = Intrinsic::hexagon_S2_lsr_r_r_acc; break;
  2966. case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_r_acc:
  2967. ID = Intrinsic::hexagon_S2_lsl_r_r_acc; break;
  2968. case Hexagon::BI__builtin_HEXAGON_S2_asr_r_p_acc:
  2969. ID = Intrinsic::hexagon_S2_asr_r_p_acc; break;
  2970. case Hexagon::BI__builtin_HEXAGON_S2_asl_r_p_acc:
  2971. ID = Intrinsic::hexagon_S2_asl_r_p_acc; break;
  2972. case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_p_acc:
  2973. ID = Intrinsic::hexagon_S2_lsr_r_p_acc; break;
  2974. case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_p_acc:
  2975. ID = Intrinsic::hexagon_S2_lsl_r_p_acc; break;
  2976. case Hexagon::BI__builtin_HEXAGON_S2_asr_r_r_nac:
  2977. ID = Intrinsic::hexagon_S2_asr_r_r_nac; break;
  2978. case Hexagon::BI__builtin_HEXAGON_S2_asl_r_r_nac:
  2979. ID = Intrinsic::hexagon_S2_asl_r_r_nac; break;
  2980. case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_r_nac:
  2981. ID = Intrinsic::hexagon_S2_lsr_r_r_nac; break;
  2982. case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_r_nac:
  2983. ID = Intrinsic::hexagon_S2_lsl_r_r_nac; break;
  2984. case Hexagon::BI__builtin_HEXAGON_S2_asr_r_p_nac:
  2985. ID = Intrinsic::hexagon_S2_asr_r_p_nac; break;
  2986. case Hexagon::BI__builtin_HEXAGON_S2_asl_r_p_nac:
  2987. ID = Intrinsic::hexagon_S2_asl_r_p_nac; break;
  2988. case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_p_nac:
  2989. ID = Intrinsic::hexagon_S2_lsr_r_p_nac; break;
  2990. case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_p_nac:
  2991. ID = Intrinsic::hexagon_S2_lsl_r_p_nac; break;
  2992. case Hexagon::BI__builtin_HEXAGON_S2_asr_r_r_and:
  2993. ID = Intrinsic::hexagon_S2_asr_r_r_and; break;
  2994. case Hexagon::BI__builtin_HEXAGON_S2_asl_r_r_and:
  2995. ID = Intrinsic::hexagon_S2_asl_r_r_and; break;
  2996. case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_r_and:
  2997. ID = Intrinsic::hexagon_S2_lsr_r_r_and; break;
  2998. case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_r_and:
  2999. ID = Intrinsic::hexagon_S2_lsl_r_r_and; break;
  3000. case Hexagon::BI__builtin_HEXAGON_S2_asr_r_r_or:
  3001. ID = Intrinsic::hexagon_S2_asr_r_r_or; break;
  3002. case Hexagon::BI__builtin_HEXAGON_S2_asl_r_r_or:
  3003. ID = Intrinsic::hexagon_S2_asl_r_r_or; break;
  3004. case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_r_or:
  3005. ID = Intrinsic::hexagon_S2_lsr_r_r_or; break;
  3006. case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_r_or:
  3007. ID = Intrinsic::hexagon_S2_lsl_r_r_or; break;
  3008. case Hexagon::BI__builtin_HEXAGON_S2_asr_r_p_and:
  3009. ID = Intrinsic::hexagon_S2_asr_r_p_and; break;
  3010. case Hexagon::BI__builtin_HEXAGON_S2_asl_r_p_and:
  3011. ID = Intrinsic::hexagon_S2_asl_r_p_and; break;
  3012. case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_p_and:
  3013. ID = Intrinsic::hexagon_S2_lsr_r_p_and; break;
  3014. case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_p_and:
  3015. ID = Intrinsic::hexagon_S2_lsl_r_p_and; break;
  3016. case Hexagon::BI__builtin_HEXAGON_S2_asr_r_p_or:
  3017. ID = Intrinsic::hexagon_S2_asr_r_p_or; break;
  3018. case Hexagon::BI__builtin_HEXAGON_S2_asl_r_p_or:
  3019. ID = Intrinsic::hexagon_S2_asl_r_p_or; break;
  3020. case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_p_or:
  3021. ID = Intrinsic::hexagon_S2_lsr_r_p_or; break;
  3022. case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_p_or:
  3023. ID = Intrinsic::hexagon_S2_lsl_r_p_or; break;
  3024. case Hexagon::BI__builtin_HEXAGON_S2_asr_r_r_sat:
  3025. ID = Intrinsic::hexagon_S2_asr_r_r_sat; break;
  3026. case Hexagon::BI__builtin_HEXAGON_S2_asl_r_r_sat:
  3027. ID = Intrinsic::hexagon_S2_asl_r_r_sat; break;
  3028. case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r:
  3029. ID = Intrinsic::hexagon_S2_asr_i_r; break;
  3030. case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r:
  3031. ID = Intrinsic::hexagon_S2_lsr_i_r; break;
  3032. case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r:
  3033. ID = Intrinsic::hexagon_S2_asl_i_r; break;
  3034. case Hexagon::BI__builtin_HEXAGON_S2_asr_i_p:
  3035. ID = Intrinsic::hexagon_S2_asr_i_p; break;
  3036. case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p:
  3037. ID = Intrinsic::hexagon_S2_lsr_i_p; break;
  3038. case Hexagon::BI__builtin_HEXAGON_S2_asl_i_p:
  3039. ID = Intrinsic::hexagon_S2_asl_i_p; break;
  3040. case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_acc:
  3041. ID = Intrinsic::hexagon_S2_asr_i_r_acc; break;
  3042. case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_acc:
  3043. ID = Intrinsic::hexagon_S2_lsr_i_r_acc; break;
  3044. case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_acc:
  3045. ID = Intrinsic::hexagon_S2_asl_i_r_acc; break;
  3046. case Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_acc:
  3047. ID = Intrinsic::hexagon_S2_asr_i_p_acc; break;
  3048. case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_acc:
  3049. ID = Intrinsic::hexagon_S2_lsr_i_p_acc; break;
  3050. case Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_acc:
  3051. ID = Intrinsic::hexagon_S2_asl_i_p_acc; break;
  3052. case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_nac:
  3053. ID = Intrinsic::hexagon_S2_asr_i_r_nac; break;
  3054. case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_nac:
  3055. ID = Intrinsic::hexagon_S2_lsr_i_r_nac; break;
  3056. case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_nac:
  3057. ID = Intrinsic::hexagon_S2_asl_i_r_nac; break;
  3058. case Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_nac:
  3059. ID = Intrinsic::hexagon_S2_asr_i_p_nac; break;
  3060. case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_nac:
  3061. ID = Intrinsic::hexagon_S2_lsr_i_p_nac; break;
  3062. case Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_nac:
  3063. ID = Intrinsic::hexagon_S2_asl_i_p_nac; break;
  3064. case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_xacc:
  3065. ID = Intrinsic::hexagon_S2_lsr_i_r_xacc; break;
  3066. case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_xacc:
  3067. ID = Intrinsic::hexagon_S2_asl_i_r_xacc; break;
  3068. case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_xacc:
  3069. ID = Intrinsic::hexagon_S2_lsr_i_p_xacc; break;
  3070. case Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_xacc:
  3071. ID = Intrinsic::hexagon_S2_asl_i_p_xacc; break;
  3072. case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_and:
  3073. ID = Intrinsic::hexagon_S2_asr_i_r_and; break;
  3074. case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_and:
  3075. ID = Intrinsic::hexagon_S2_lsr_i_r_and; break;
  3076. case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_and:
  3077. ID = Intrinsic::hexagon_S2_asl_i_r_and; break;
  3078. case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_or:
  3079. ID = Intrinsic::hexagon_S2_asr_i_r_or; break;
  3080. case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_r_or:
  3081. ID = Intrinsic::hexagon_S2_lsr_i_r_or; break;
  3082. case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_or:
  3083. ID = Intrinsic::hexagon_S2_asl_i_r_or; break;
  3084. case Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_and:
  3085. ID = Intrinsic::hexagon_S2_asr_i_p_and; break;
  3086. case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_and:
  3087. ID = Intrinsic::hexagon_S2_lsr_i_p_and; break;
  3088. case Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_and:
  3089. ID = Intrinsic::hexagon_S2_asl_i_p_and; break;
  3090. case Hexagon::BI__builtin_HEXAGON_S2_asr_i_p_or:
  3091. ID = Intrinsic::hexagon_S2_asr_i_p_or; break;
  3092. case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_p_or:
  3093. ID = Intrinsic::hexagon_S2_lsr_i_p_or; break;
  3094. case Hexagon::BI__builtin_HEXAGON_S2_asl_i_p_or:
  3095. ID = Intrinsic::hexagon_S2_asl_i_p_or; break;
  3096. case Hexagon::BI__builtin_HEXAGON_S2_asl_i_r_sat:
  3097. ID = Intrinsic::hexagon_S2_asl_i_r_sat; break;
  3098. case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_rnd:
  3099. ID = Intrinsic::hexagon_S2_asr_i_r_rnd; break;
  3100. case Hexagon::BI__builtin_HEXAGON_S2_asr_i_r_rnd_goodsyntax:
  3101. ID = Intrinsic::hexagon_S2_asr_i_r_rnd_goodsyntax; break;
  3102. case Hexagon::BI__builtin_HEXAGON_S2_addasl_rrri:
  3103. ID = Intrinsic::hexagon_S2_addasl_rrri; break;
  3104. case Hexagon::BI__builtin_HEXAGON_S2_valignib:
  3105. ID = Intrinsic::hexagon_S2_valignib; break;
  3106. case Hexagon::BI__builtin_HEXAGON_S2_valignrb:
  3107. ID = Intrinsic::hexagon_S2_valignrb; break;
  3108. case Hexagon::BI__builtin_HEXAGON_S2_vspliceib:
  3109. ID = Intrinsic::hexagon_S2_vspliceib; break;
  3110. case Hexagon::BI__builtin_HEXAGON_S2_vsplicerb:
  3111. ID = Intrinsic::hexagon_S2_vsplicerb; break;
  3112. case Hexagon::BI__builtin_HEXAGON_S2_vsplatrh:
  3113. ID = Intrinsic::hexagon_S2_vsplatrh; break;
  3114. case Hexagon::BI__builtin_HEXAGON_S2_vsplatrb:
  3115. ID = Intrinsic::hexagon_S2_vsplatrb; break;
  3116. case Hexagon::BI__builtin_HEXAGON_S2_insert:
  3117. ID = Intrinsic::hexagon_S2_insert; break;
  3118. case Hexagon::BI__builtin_HEXAGON_S2_tableidxb_goodsyntax:
  3119. ID = Intrinsic::hexagon_S2_tableidxb_goodsyntax; break;
  3120. case Hexagon::BI__builtin_HEXAGON_S2_tableidxh_goodsyntax:
  3121. ID = Intrinsic::hexagon_S2_tableidxh_goodsyntax; break;
  3122. case Hexagon::BI__builtin_HEXAGON_S2_tableidxw_goodsyntax:
  3123. ID = Intrinsic::hexagon_S2_tableidxw_goodsyntax; break;
  3124. case Hexagon::BI__builtin_HEXAGON_S2_tableidxd_goodsyntax:
  3125. ID = Intrinsic::hexagon_S2_tableidxd_goodsyntax; break;
  3126. case Hexagon::BI__builtin_HEXAGON_S2_extractu:
  3127. ID = Intrinsic::hexagon_S2_extractu; break;
  3128. case Hexagon::BI__builtin_HEXAGON_S2_insertp:
  3129. ID = Intrinsic::hexagon_S2_insertp; break;
  3130. case Hexagon::BI__builtin_HEXAGON_S2_extractup:
  3131. ID = Intrinsic::hexagon_S2_extractup; break;
  3132. case Hexagon::BI__builtin_HEXAGON_S2_insert_rp:
  3133. ID = Intrinsic::hexagon_S2_insert_rp; break;
  3134. case Hexagon::BI__builtin_HEXAGON_S2_extractu_rp:
  3135. ID = Intrinsic::hexagon_S2_extractu_rp; break;
  3136. case Hexagon::BI__builtin_HEXAGON_S2_insertp_rp:
  3137. ID = Intrinsic::hexagon_S2_insertp_rp; break;
  3138. case Hexagon::BI__builtin_HEXAGON_S2_extractup_rp:
  3139. ID = Intrinsic::hexagon_S2_extractup_rp; break;
  3140. case Hexagon::BI__builtin_HEXAGON_S2_tstbit_i:
  3141. ID = Intrinsic::hexagon_S2_tstbit_i; break;
  3142. case Hexagon::BI__builtin_HEXAGON_S2_setbit_i:
  3143. ID = Intrinsic::hexagon_S2_setbit_i; break;
  3144. case Hexagon::BI__builtin_HEXAGON_S2_togglebit_i:
  3145. ID = Intrinsic::hexagon_S2_togglebit_i; break;
  3146. case Hexagon::BI__builtin_HEXAGON_S2_clrbit_i:
  3147. ID = Intrinsic::hexagon_S2_clrbit_i; break;
  3148. case Hexagon::BI__builtin_HEXAGON_S2_tstbit_r:
  3149. ID = Intrinsic::hexagon_S2_tstbit_r; break;
  3150. case Hexagon::BI__builtin_HEXAGON_S2_setbit_r:
  3151. ID = Intrinsic::hexagon_S2_setbit_r; break;
  3152. case Hexagon::BI__builtin_HEXAGON_S2_togglebit_r:
  3153. ID = Intrinsic::hexagon_S2_togglebit_r; break;
  3154. case Hexagon::BI__builtin_HEXAGON_S2_clrbit_r:
  3155. ID = Intrinsic::hexagon_S2_clrbit_r; break;
  3156. case Hexagon::BI__builtin_HEXAGON_S2_asr_i_vh:
  3157. ID = Intrinsic::hexagon_S2_asr_i_vh; break;
  3158. case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_vh:
  3159. ID = Intrinsic::hexagon_S2_lsr_i_vh; break;
  3160. case Hexagon::BI__builtin_HEXAGON_S2_asl_i_vh:
  3161. ID = Intrinsic::hexagon_S2_asl_i_vh; break;
  3162. case Hexagon::BI__builtin_HEXAGON_S2_asr_r_vh:
  3163. ID = Intrinsic::hexagon_S2_asr_r_vh; break;
  3164. case Hexagon::BI__builtin_HEXAGON_S2_asl_r_vh:
  3165. ID = Intrinsic::hexagon_S2_asl_r_vh; break;
  3166. case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_vh:
  3167. ID = Intrinsic::hexagon_S2_lsr_r_vh; break;
  3168. case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_vh:
  3169. ID = Intrinsic::hexagon_S2_lsl_r_vh; break;
  3170. case Hexagon::BI__builtin_HEXAGON_S2_asr_i_vw:
  3171. ID = Intrinsic::hexagon_S2_asr_i_vw; break;
  3172. case Hexagon::BI__builtin_HEXAGON_S2_asr_i_svw_trun:
  3173. ID = Intrinsic::hexagon_S2_asr_i_svw_trun; break;
  3174. case Hexagon::BI__builtin_HEXAGON_S2_asr_r_svw_trun:
  3175. ID = Intrinsic::hexagon_S2_asr_r_svw_trun; break;
  3176. case Hexagon::BI__builtin_HEXAGON_S2_lsr_i_vw:
  3177. ID = Intrinsic::hexagon_S2_lsr_i_vw; break;
  3178. case Hexagon::BI__builtin_HEXAGON_S2_asl_i_vw:
  3179. ID = Intrinsic::hexagon_S2_asl_i_vw; break;
  3180. case Hexagon::BI__builtin_HEXAGON_S2_asr_r_vw:
  3181. ID = Intrinsic::hexagon_S2_asr_r_vw; break;
  3182. case Hexagon::BI__builtin_HEXAGON_S2_asl_r_vw:
  3183. ID = Intrinsic::hexagon_S2_asl_r_vw; break;
  3184. case Hexagon::BI__builtin_HEXAGON_S2_lsr_r_vw:
  3185. ID = Intrinsic::hexagon_S2_lsr_r_vw; break;
  3186. case Hexagon::BI__builtin_HEXAGON_S2_lsl_r_vw:
  3187. ID = Intrinsic::hexagon_S2_lsl_r_vw; break;
  3188. case Hexagon::BI__builtin_HEXAGON_S2_vrndpackwh:
  3189. ID = Intrinsic::hexagon_S2_vrndpackwh; break;
  3190. case Hexagon::BI__builtin_HEXAGON_S2_vrndpackwhs:
  3191. ID = Intrinsic::hexagon_S2_vrndpackwhs; break;
  3192. case Hexagon::BI__builtin_HEXAGON_S2_vsxtbh:
  3193. ID = Intrinsic::hexagon_S2_vsxtbh; break;
  3194. case Hexagon::BI__builtin_HEXAGON_S2_vzxtbh:
  3195. ID = Intrinsic::hexagon_S2_vzxtbh; break;
  3196. case Hexagon::BI__builtin_HEXAGON_S2_vsathub:
  3197. ID = Intrinsic::hexagon_S2_vsathub; break;
  3198. case Hexagon::BI__builtin_HEXAGON_S2_svsathub:
  3199. ID = Intrinsic::hexagon_S2_svsathub; break;
  3200. case Hexagon::BI__builtin_HEXAGON_S2_svsathb:
  3201. ID = Intrinsic::hexagon_S2_svsathb; break;
  3202. case Hexagon::BI__builtin_HEXAGON_S2_vsathb:
  3203. ID = Intrinsic::hexagon_S2_vsathb; break;
  3204. case Hexagon::BI__builtin_HEXAGON_S2_vtrunohb:
  3205. ID = Intrinsic::hexagon_S2_vtrunohb; break;
  3206. case Hexagon::BI__builtin_HEXAGON_S2_vtrunewh:
  3207. ID = Intrinsic::hexagon_S2_vtrunewh; break;
  3208. case Hexagon::BI__builtin_HEXAGON_S2_vtrunowh:
  3209. ID = Intrinsic::hexagon_S2_vtrunowh; break;
  3210. case Hexagon::BI__builtin_HEXAGON_S2_vtrunehb:
  3211. ID = Intrinsic::hexagon_S2_vtrunehb; break;
  3212. case Hexagon::BI__builtin_HEXAGON_S2_vsxthw:
  3213. ID = Intrinsic::hexagon_S2_vsxthw; break;
  3214. case Hexagon::BI__builtin_HEXAGON_S2_vzxthw:
  3215. ID = Intrinsic::hexagon_S2_vzxthw; break;
  3216. case Hexagon::BI__builtin_HEXAGON_S2_vsatwh:
  3217. ID = Intrinsic::hexagon_S2_vsatwh; break;
  3218. case Hexagon::BI__builtin_HEXAGON_S2_vsatwuh:
  3219. ID = Intrinsic::hexagon_S2_vsatwuh; break;
  3220. case Hexagon::BI__builtin_HEXAGON_S2_packhl:
  3221. ID = Intrinsic::hexagon_S2_packhl; break;
  3222. case Hexagon::BI__builtin_HEXAGON_A2_swiz:
  3223. ID = Intrinsic::hexagon_A2_swiz; break;
  3224. case Hexagon::BI__builtin_HEXAGON_S2_vsathub_nopack:
  3225. ID = Intrinsic::hexagon_S2_vsathub_nopack; break;
  3226. case Hexagon::BI__builtin_HEXAGON_S2_vsathb_nopack:
  3227. ID = Intrinsic::hexagon_S2_vsathb_nopack; break;
  3228. case Hexagon::BI__builtin_HEXAGON_S2_vsatwh_nopack:
  3229. ID = Intrinsic::hexagon_S2_vsatwh_nopack; break;
  3230. case Hexagon::BI__builtin_HEXAGON_S2_vsatwuh_nopack:
  3231. ID = Intrinsic::hexagon_S2_vsatwuh_nopack; break;
  3232. case Hexagon::BI__builtin_HEXAGON_S2_shuffob:
  3233. ID = Intrinsic::hexagon_S2_shuffob; break;
  3234. case Hexagon::BI__builtin_HEXAGON_S2_shuffeb:
  3235. ID = Intrinsic::hexagon_S2_shuffeb; break;
  3236. case Hexagon::BI__builtin_HEXAGON_S2_shuffoh:
  3237. ID = Intrinsic::hexagon_S2_shuffoh; break;
  3238. case Hexagon::BI__builtin_HEXAGON_S2_shuffeh:
  3239. ID = Intrinsic::hexagon_S2_shuffeh; break;
  3240. case Hexagon::BI__builtin_HEXAGON_S2_parityp:
  3241. ID = Intrinsic::hexagon_S2_parityp; break;
  3242. case Hexagon::BI__builtin_HEXAGON_S2_lfsp:
  3243. ID = Intrinsic::hexagon_S2_lfsp; break;
  3244. case Hexagon::BI__builtin_HEXAGON_S2_clbnorm:
  3245. ID = Intrinsic::hexagon_S2_clbnorm; break;
  3246. case Hexagon::BI__builtin_HEXAGON_S2_clb:
  3247. ID = Intrinsic::hexagon_S2_clb; break;
  3248. case Hexagon::BI__builtin_HEXAGON_S2_cl0:
  3249. ID = Intrinsic::hexagon_S2_cl0; break;
  3250. case Hexagon::BI__builtin_HEXAGON_S2_cl1:
  3251. ID = Intrinsic::hexagon_S2_cl1; break;
  3252. case Hexagon::BI__builtin_HEXAGON_S2_clbp:
  3253. ID = Intrinsic::hexagon_S2_clbp; break;
  3254. case Hexagon::BI__builtin_HEXAGON_S2_cl0p:
  3255. ID = Intrinsic::hexagon_S2_cl0p; break;
  3256. case Hexagon::BI__builtin_HEXAGON_S2_cl1p:
  3257. ID = Intrinsic::hexagon_S2_cl1p; break;
  3258. case Hexagon::BI__builtin_HEXAGON_S2_brev:
  3259. ID = Intrinsic::hexagon_S2_brev; break;
  3260. case Hexagon::BI__builtin_HEXAGON_S2_ct0:
  3261. ID = Intrinsic::hexagon_S2_ct0; break;
  3262. case Hexagon::BI__builtin_HEXAGON_S2_ct1:
  3263. ID = Intrinsic::hexagon_S2_ct1; break;
  3264. case Hexagon::BI__builtin_HEXAGON_S2_interleave:
  3265. ID = Intrinsic::hexagon_S2_interleave; break;
  3266. case Hexagon::BI__builtin_HEXAGON_S2_deinterleave:
  3267. ID = Intrinsic::hexagon_S2_deinterleave; break;
  3268. case Hexagon::BI__builtin_SI_to_SXTHI_asrh:
  3269. ID = Intrinsic::hexagon_SI_to_SXTHI_asrh; break;
  3270. case Hexagon::BI__builtin_HEXAGON_A4_orn:
  3271. ID = Intrinsic::hexagon_A4_orn; break;
  3272. case Hexagon::BI__builtin_HEXAGON_A4_andn:
  3273. ID = Intrinsic::hexagon_A4_andn; break;
  3274. case Hexagon::BI__builtin_HEXAGON_A4_ornp:
  3275. ID = Intrinsic::hexagon_A4_ornp; break;
  3276. case Hexagon::BI__builtin_HEXAGON_A4_andnp:
  3277. ID = Intrinsic::hexagon_A4_andnp; break;
  3278. case Hexagon::BI__builtin_HEXAGON_A4_combineir:
  3279. ID = Intrinsic::hexagon_A4_combineir; break;
  3280. case Hexagon::BI__builtin_HEXAGON_A4_combineri:
  3281. ID = Intrinsic::hexagon_A4_combineri; break;
  3282. case Hexagon::BI__builtin_HEXAGON_C4_cmpneqi:
  3283. ID = Intrinsic::hexagon_C4_cmpneqi; break;
  3284. case Hexagon::BI__builtin_HEXAGON_C4_cmpneq:
  3285. ID = Intrinsic::hexagon_C4_cmpneq; break;
  3286. case Hexagon::BI__builtin_HEXAGON_C4_cmpltei:
  3287. ID = Intrinsic::hexagon_C4_cmpltei; break;
  3288. case Hexagon::BI__builtin_HEXAGON_C4_cmplte:
  3289. ID = Intrinsic::hexagon_C4_cmplte; break;
  3290. case Hexagon::BI__builtin_HEXAGON_C4_cmplteui:
  3291. ID = Intrinsic::hexagon_C4_cmplteui; break;
  3292. case Hexagon::BI__builtin_HEXAGON_C4_cmplteu:
  3293. ID = Intrinsic::hexagon_C4_cmplteu; break;
  3294. case Hexagon::BI__builtin_HEXAGON_A4_rcmpneq:
  3295. ID = Intrinsic::hexagon_A4_rcmpneq; break;
  3296. case Hexagon::BI__builtin_HEXAGON_A4_rcmpneqi:
  3297. ID = Intrinsic::hexagon_A4_rcmpneqi; break;
  3298. case Hexagon::BI__builtin_HEXAGON_A4_rcmpeq:
  3299. ID = Intrinsic::hexagon_A4_rcmpeq; break;
  3300. case Hexagon::BI__builtin_HEXAGON_A4_rcmpeqi:
  3301. ID = Intrinsic::hexagon_A4_rcmpeqi; break;
  3302. case Hexagon::BI__builtin_HEXAGON_C4_fastcorner9:
  3303. ID = Intrinsic::hexagon_C4_fastcorner9; break;
  3304. case Hexagon::BI__builtin_HEXAGON_C4_fastcorner9_not:
  3305. ID = Intrinsic::hexagon_C4_fastcorner9_not; break;
  3306. case Hexagon::BI__builtin_HEXAGON_C4_and_andn:
  3307. ID = Intrinsic::hexagon_C4_and_andn; break;
  3308. case Hexagon::BI__builtin_HEXAGON_C4_and_and:
  3309. ID = Intrinsic::hexagon_C4_and_and; break;
  3310. case Hexagon::BI__builtin_HEXAGON_C4_and_orn:
  3311. ID = Intrinsic::hexagon_C4_and_orn; break;
  3312. case Hexagon::BI__builtin_HEXAGON_C4_and_or:
  3313. ID = Intrinsic::hexagon_C4_and_or; break;
  3314. case Hexagon::BI__builtin_HEXAGON_C4_or_andn:
  3315. ID = Intrinsic::hexagon_C4_or_andn; break;
  3316. case Hexagon::BI__builtin_HEXAGON_C4_or_and:
  3317. ID = Intrinsic::hexagon_C4_or_and; break;
  3318. case Hexagon::BI__builtin_HEXAGON_C4_or_orn:
  3319. ID = Intrinsic::hexagon_C4_or_orn; break;
  3320. case Hexagon::BI__builtin_HEXAGON_C4_or_or:
  3321. ID = Intrinsic::hexagon_C4_or_or; break;
  3322. case Hexagon::BI__builtin_HEXAGON_S4_addaddi:
  3323. ID = Intrinsic::hexagon_S4_addaddi; break;
  3324. case Hexagon::BI__builtin_HEXAGON_S4_subaddi:
  3325. ID = Intrinsic::hexagon_S4_subaddi; break;
  3326. case Hexagon::BI__builtin_HEXAGON_M4_xor_xacc:
  3327. ID = Intrinsic::hexagon_M4_xor_xacc; break;
  3328. case Hexagon::BI__builtin_HEXAGON_M4_and_and:
  3329. ID = Intrinsic::hexagon_M4_and_and; break;
  3330. case Hexagon::BI__builtin_HEXAGON_M4_and_or:
  3331. ID = Intrinsic::hexagon_M4_and_or; break;
  3332. case Hexagon::BI__builtin_HEXAGON_M4_and_xor:
  3333. ID = Intrinsic::hexagon_M4_and_xor; break;
  3334. case Hexagon::BI__builtin_HEXAGON_M4_and_andn:
  3335. ID = Intrinsic::hexagon_M4_and_andn; break;
  3336. case Hexagon::BI__builtin_HEXAGON_M4_xor_and:
  3337. ID = Intrinsic::hexagon_M4_xor_and; break;
  3338. case Hexagon::BI__builtin_HEXAGON_M4_xor_or:
  3339. ID = Intrinsic::hexagon_M4_xor_or; break;
  3340. case Hexagon::BI__builtin_HEXAGON_M4_xor_andn:
  3341. ID = Intrinsic::hexagon_M4_xor_andn; break;
  3342. case Hexagon::BI__builtin_HEXAGON_M4_or_and:
  3343. ID = Intrinsic::hexagon_M4_or_and; break;
  3344. case Hexagon::BI__builtin_HEXAGON_M4_or_or:
  3345. ID = Intrinsic::hexagon_M4_or_or; break;
  3346. case Hexagon::BI__builtin_HEXAGON_M4_or_xor:
  3347. ID = Intrinsic::hexagon_M4_or_xor; break;
  3348. case Hexagon::BI__builtin_HEXAGON_M4_or_andn:
  3349. ID = Intrinsic::hexagon_M4_or_andn; break;
  3350. case Hexagon::BI__builtin_HEXAGON_S4_or_andix:
  3351. ID = Intrinsic::hexagon_S4_or_andix; break;
  3352. case Hexagon::BI__builtin_HEXAGON_S4_or_andi:
  3353. ID = Intrinsic::hexagon_S4_or_andi; break;
  3354. case Hexagon::BI__builtin_HEXAGON_S4_or_ori:
  3355. ID = Intrinsic::hexagon_S4_or_ori; break;
  3356. case Hexagon::BI__builtin_HEXAGON_A4_modwrapu:
  3357. ID = Intrinsic::hexagon_A4_modwrapu; break;
  3358. case Hexagon::BI__builtin_HEXAGON_A4_cround_rr:
  3359. ID = Intrinsic::hexagon_A4_cround_rr; break;
  3360. case Hexagon::BI__builtin_HEXAGON_A4_round_ri:
  3361. ID = Intrinsic::hexagon_A4_round_ri; break;
  3362. case Hexagon::BI__builtin_HEXAGON_A4_round_rr:
  3363. ID = Intrinsic::hexagon_A4_round_rr; break;
  3364. case Hexagon::BI__builtin_HEXAGON_A4_round_ri_sat:
  3365. ID = Intrinsic::hexagon_A4_round_ri_sat; break;
  3366. case Hexagon::BI__builtin_HEXAGON_A4_round_rr_sat:
  3367. ID = Intrinsic::hexagon_A4_round_rr_sat; break;
  3368. }
  3369. llvm::Function *F = CGM.getIntrinsic(ID);
  3370. return Builder.CreateCall(F, Ops, "");
  3371. }
  3372. Value *CodeGenFunction::EmitPPCBuiltinExpr(unsigned BuiltinID,
  3373. const CallExpr *E) {
  3374. SmallVector<Value*, 4> Ops;
  3375. for (unsigned i = 0, e = E->getNumArgs(); i != e; i++)
  3376. Ops.push_back(EmitScalarExpr(E->getArg(i)));
  3377. Intrinsic::ID ID = Intrinsic::not_intrinsic;
  3378. switch (BuiltinID) {
  3379. default: return 0;
  3380. // vec_ld, vec_lvsl, vec_lvsr
  3381. case PPC::BI__builtin_altivec_lvx:
  3382. case PPC::BI__builtin_altivec_lvxl:
  3383. case PPC::BI__builtin_altivec_lvebx:
  3384. case PPC::BI__builtin_altivec_lvehx:
  3385. case PPC::BI__builtin_altivec_lvewx:
  3386. case PPC::BI__builtin_altivec_lvsl:
  3387. case PPC::BI__builtin_altivec_lvsr:
  3388. {
  3389. Ops[1] = Builder.CreateBitCast(Ops[1], Int8PtrTy);
  3390. Ops[0] = Builder.CreateGEP(Ops[1], Ops[0]);
  3391. Ops.pop_back();
  3392. switch (BuiltinID) {
  3393. default: llvm_unreachable("Unsupported ld/lvsl/lvsr intrinsic!");
  3394. case PPC::BI__builtin_altivec_lvx:
  3395. ID = Intrinsic::ppc_altivec_lvx;
  3396. break;
  3397. case PPC::BI__builtin_altivec_lvxl:
  3398. ID = Intrinsic::ppc_altivec_lvxl;
  3399. break;
  3400. case PPC::BI__builtin_altivec_lvebx:
  3401. ID = Intrinsic::ppc_altivec_lvebx;
  3402. break;
  3403. case PPC::BI__builtin_altivec_lvehx:
  3404. ID = Intrinsic::ppc_altivec_lvehx;
  3405. break;
  3406. case PPC::BI__builtin_altivec_lvewx:
  3407. ID = Intrinsic::ppc_altivec_lvewx;
  3408. break;
  3409. case PPC::BI__builtin_altivec_lvsl:
  3410. ID = Intrinsic::ppc_altivec_lvsl;
  3411. break;
  3412. case PPC::BI__builtin_altivec_lvsr:
  3413. ID = Intrinsic::ppc_altivec_lvsr;
  3414. break;
  3415. }
  3416. llvm::Function *F = CGM.getIntrinsic(ID);
  3417. return Builder.CreateCall(F, Ops, "");
  3418. }
  3419. // vec_st
  3420. case PPC::BI__builtin_altivec_stvx:
  3421. case PPC::BI__builtin_altivec_stvxl:
  3422. case PPC::BI__builtin_altivec_stvebx:
  3423. case PPC::BI__builtin_altivec_stvehx:
  3424. case PPC::BI__builtin_altivec_stvewx:
  3425. {
  3426. Ops[2] = Builder.CreateBitCast(Ops[2], Int8PtrTy);
  3427. Ops[1] = Builder.CreateGEP(Ops[2], Ops[1]);
  3428. Ops.pop_back();
  3429. switch (BuiltinID) {
  3430. default: llvm_unreachable("Unsupported st intrinsic!");
  3431. case PPC::BI__builtin_altivec_stvx:
  3432. ID = Intrinsic::ppc_altivec_stvx;
  3433. break;
  3434. case PPC::BI__builtin_altivec_stvxl:
  3435. ID = Intrinsic::ppc_altivec_stvxl;
  3436. break;
  3437. case PPC::BI__builtin_altivec_stvebx:
  3438. ID = Intrinsic::ppc_altivec_stvebx;
  3439. break;
  3440. case PPC::BI__builtin_altivec_stvehx:
  3441. ID = Intrinsic::ppc_altivec_stvehx;
  3442. break;
  3443. case PPC::BI__builtin_altivec_stvewx:
  3444. ID = Intrinsic::ppc_altivec_stvewx;
  3445. break;
  3446. }
  3447. llvm::Function *F = CGM.getIntrinsic(ID);
  3448. return Builder.CreateCall(F, Ops, "");
  3449. }
  3450. }
  3451. }