LoongArch-Add-Loongson-SX-directive-builtin-function.patch
From aafa5ab8c53dd2919d417b2f47e0c0e63ca7e10d Mon Sep 17 00:00:00 2001
From: Lulu Cheng <chenglulu@loongson.cn>
Date: Thu, 16 Mar 2023 16:31:04 +0800
Subject: [PATCH 064/124] LoongArch: Add Loongson SX directive builtin function
support.
gcc/ChangeLog:
* config.gcc: Export the header file lsxintrin.h.
* config/loongarch/loongarch-builtins.cc (LARCH_FTYPE_NAME4): Add builtin function support.
(enum loongarch_builtin_type): Ditto.
(AVAIL_ALL): Ditto.
(LARCH_BUILTIN): Ditto.
(LSX_BUILTIN): Ditto.
(LSX_BUILTIN_TEST_BRANCH): Ditto.
(LSX_NO_TARGET_BUILTIN): Ditto.
(CODE_FOR_lsx_vsadd_b): Ditto.
(CODE_FOR_lsx_vsadd_h): Ditto.
(CODE_FOR_lsx_vsadd_w): Ditto.
(CODE_FOR_lsx_vsadd_d): Ditto.
(CODE_FOR_lsx_vsadd_bu): Ditto.
(CODE_FOR_lsx_vsadd_hu): Ditto.
(CODE_FOR_lsx_vsadd_wu): Ditto.
(CODE_FOR_lsx_vsadd_du): Ditto.
(CODE_FOR_lsx_vadd_b): Ditto.
(CODE_FOR_lsx_vadd_h): Ditto.
(CODE_FOR_lsx_vadd_w): Ditto.
(CODE_FOR_lsx_vadd_d): Ditto.
(CODE_FOR_lsx_vaddi_bu): Ditto.
(CODE_FOR_lsx_vaddi_hu): Ditto.
(CODE_FOR_lsx_vaddi_wu): Ditto.
(CODE_FOR_lsx_vaddi_du): Ditto.
(CODE_FOR_lsx_vand_v): Ditto.
(CODE_FOR_lsx_vandi_b): Ditto.
(CODE_FOR_lsx_bnz_v): Ditto.
(CODE_FOR_lsx_bz_v): Ditto.
(CODE_FOR_lsx_vbitsel_v): Ditto.
(CODE_FOR_lsx_vseqi_b): Ditto.
(CODE_FOR_lsx_vseqi_h): Ditto.
(CODE_FOR_lsx_vseqi_w): Ditto.
(CODE_FOR_lsx_vseqi_d): Ditto.
(CODE_FOR_lsx_vslti_b): Ditto.
(CODE_FOR_lsx_vslti_h): Ditto.
(CODE_FOR_lsx_vslti_w): Ditto.
(CODE_FOR_lsx_vslti_d): Ditto.
(CODE_FOR_lsx_vslti_bu): Ditto.
(CODE_FOR_lsx_vslti_hu): Ditto.
(CODE_FOR_lsx_vslti_wu): Ditto.
(CODE_FOR_lsx_vslti_du): Ditto.
(CODE_FOR_lsx_vslei_b): Ditto.
(CODE_FOR_lsx_vslei_h): Ditto.
(CODE_FOR_lsx_vslei_w): Ditto.
(CODE_FOR_lsx_vslei_d): Ditto.
(CODE_FOR_lsx_vslei_bu): Ditto.
(CODE_FOR_lsx_vslei_hu): Ditto.
(CODE_FOR_lsx_vslei_wu): Ditto.
(CODE_FOR_lsx_vslei_du): Ditto.
(CODE_FOR_lsx_vdiv_b): Ditto.
(CODE_FOR_lsx_vdiv_h): Ditto.
(CODE_FOR_lsx_vdiv_w): Ditto.
(CODE_FOR_lsx_vdiv_d): Ditto.
(CODE_FOR_lsx_vdiv_bu): Ditto.
(CODE_FOR_lsx_vdiv_hu): Ditto.
(CODE_FOR_lsx_vdiv_wu): Ditto.
(CODE_FOR_lsx_vdiv_du): Ditto.
(CODE_FOR_lsx_vfadd_s): Ditto.
(CODE_FOR_lsx_vfadd_d): Ditto.
(CODE_FOR_lsx_vftintrz_w_s): Ditto.
(CODE_FOR_lsx_vftintrz_l_d): Ditto.
(CODE_FOR_lsx_vftintrz_wu_s): Ditto.
(CODE_FOR_lsx_vftintrz_lu_d): Ditto.
(CODE_FOR_lsx_vffint_s_w): Ditto.
(CODE_FOR_lsx_vffint_d_l): Ditto.
(CODE_FOR_lsx_vffint_s_wu): Ditto.
(CODE_FOR_lsx_vffint_d_lu): Ditto.
(CODE_FOR_lsx_vfsub_s): Ditto.
(CODE_FOR_lsx_vfsub_d): Ditto.
(CODE_FOR_lsx_vfmul_s): Ditto.
(CODE_FOR_lsx_vfmul_d): Ditto.
(CODE_FOR_lsx_vfdiv_s): Ditto.
(CODE_FOR_lsx_vfdiv_d): Ditto.
(CODE_FOR_lsx_vfmax_s): Ditto.
(CODE_FOR_lsx_vfmax_d): Ditto.
(CODE_FOR_lsx_vfmin_s): Ditto.
(CODE_FOR_lsx_vfmin_d): Ditto.
(CODE_FOR_lsx_vfsqrt_s): Ditto.
(CODE_FOR_lsx_vfsqrt_d): Ditto.
(CODE_FOR_lsx_vflogb_s): Ditto.
(CODE_FOR_lsx_vflogb_d): Ditto.
(CODE_FOR_lsx_vmax_b): Ditto.
(CODE_FOR_lsx_vmax_h): Ditto.
(CODE_FOR_lsx_vmax_w): Ditto.
(CODE_FOR_lsx_vmax_d): Ditto.
(CODE_FOR_lsx_vmaxi_b): Ditto.
(CODE_FOR_lsx_vmaxi_h): Ditto.
(CODE_FOR_lsx_vmaxi_w): Ditto.
(CODE_FOR_lsx_vmaxi_d): Ditto.
(CODE_FOR_lsx_vmax_bu): Ditto.
(CODE_FOR_lsx_vmax_hu): Ditto.
(CODE_FOR_lsx_vmax_wu): Ditto.
(CODE_FOR_lsx_vmax_du): Ditto.
(CODE_FOR_lsx_vmaxi_bu): Ditto.
(CODE_FOR_lsx_vmaxi_hu): Ditto.
(CODE_FOR_lsx_vmaxi_wu): Ditto.
(CODE_FOR_lsx_vmaxi_du): Ditto.
(CODE_FOR_lsx_vmin_b): Ditto.
(CODE_FOR_lsx_vmin_h): Ditto.
(CODE_FOR_lsx_vmin_w): Ditto.
(CODE_FOR_lsx_vmin_d): Ditto.
(CODE_FOR_lsx_vmini_b): Ditto.
(CODE_FOR_lsx_vmini_h): Ditto.
(CODE_FOR_lsx_vmini_w): Ditto.
(CODE_FOR_lsx_vmini_d): Ditto.
(CODE_FOR_lsx_vmin_bu): Ditto.
(CODE_FOR_lsx_vmin_hu): Ditto.
(CODE_FOR_lsx_vmin_wu): Ditto.
(CODE_FOR_lsx_vmin_du): Ditto.
(CODE_FOR_lsx_vmini_bu): Ditto.
(CODE_FOR_lsx_vmini_hu): Ditto.
(CODE_FOR_lsx_vmini_wu): Ditto.
(CODE_FOR_lsx_vmini_du): Ditto.
(CODE_FOR_lsx_vmod_b): Ditto.
(CODE_FOR_lsx_vmod_h): Ditto.
(CODE_FOR_lsx_vmod_w): Ditto.
(CODE_FOR_lsx_vmod_d): Ditto.
(CODE_FOR_lsx_vmod_bu): Ditto.
(CODE_FOR_lsx_vmod_hu): Ditto.
(CODE_FOR_lsx_vmod_wu): Ditto.
(CODE_FOR_lsx_vmod_du): Ditto.
(CODE_FOR_lsx_vmul_b): Ditto.
(CODE_FOR_lsx_vmul_h): Ditto.
(CODE_FOR_lsx_vmul_w): Ditto.
(CODE_FOR_lsx_vmul_d): Ditto.
(CODE_FOR_lsx_vclz_b): Ditto.
(CODE_FOR_lsx_vclz_h): Ditto.
(CODE_FOR_lsx_vclz_w): Ditto.
(CODE_FOR_lsx_vclz_d): Ditto.
(CODE_FOR_lsx_vnor_v): Ditto.
(CODE_FOR_lsx_vor_v): Ditto.
(CODE_FOR_lsx_vori_b): Ditto.
(CODE_FOR_lsx_vnori_b): Ditto.
(CODE_FOR_lsx_vpcnt_b): Ditto.
(CODE_FOR_lsx_vpcnt_h): Ditto.
(CODE_FOR_lsx_vpcnt_w): Ditto.
(CODE_FOR_lsx_vpcnt_d): Ditto.
(CODE_FOR_lsx_vxor_v): Ditto.
(CODE_FOR_lsx_vxori_b): Ditto.
(CODE_FOR_lsx_vsll_b): Ditto.
(CODE_FOR_lsx_vsll_h): Ditto.
(CODE_FOR_lsx_vsll_w): Ditto.
(CODE_FOR_lsx_vsll_d): Ditto.
(CODE_FOR_lsx_vslli_b): Ditto.
(CODE_FOR_lsx_vslli_h): Ditto.
(CODE_FOR_lsx_vslli_w): Ditto.
(CODE_FOR_lsx_vslli_d): Ditto.
(CODE_FOR_lsx_vsra_b): Ditto.
(CODE_FOR_lsx_vsra_h): Ditto.
(CODE_FOR_lsx_vsra_w): Ditto.
(CODE_FOR_lsx_vsra_d): Ditto.
(CODE_FOR_lsx_vsrai_b): Ditto.
(CODE_FOR_lsx_vsrai_h): Ditto.
(CODE_FOR_lsx_vsrai_w): Ditto.
(CODE_FOR_lsx_vsrai_d): Ditto.
(CODE_FOR_lsx_vsrl_b): Ditto.
(CODE_FOR_lsx_vsrl_h): Ditto.
(CODE_FOR_lsx_vsrl_w): Ditto.
(CODE_FOR_lsx_vsrl_d): Ditto.
(CODE_FOR_lsx_vsrli_b): Ditto.
(CODE_FOR_lsx_vsrli_h): Ditto.
(CODE_FOR_lsx_vsrli_w): Ditto.
(CODE_FOR_lsx_vsrli_d): Ditto.
(CODE_FOR_lsx_vsub_b): Ditto.
(CODE_FOR_lsx_vsub_h): Ditto.
(CODE_FOR_lsx_vsub_w): Ditto.
(CODE_FOR_lsx_vsub_d): Ditto.
(CODE_FOR_lsx_vsubi_bu): Ditto.
(CODE_FOR_lsx_vsubi_hu): Ditto.
(CODE_FOR_lsx_vsubi_wu): Ditto.
(CODE_FOR_lsx_vsubi_du): Ditto.
(CODE_FOR_lsx_vpackod_d): Ditto.
(CODE_FOR_lsx_vpackev_d): Ditto.
(CODE_FOR_lsx_vpickod_d): Ditto.
(CODE_FOR_lsx_vpickev_d): Ditto.
(CODE_FOR_lsx_vrepli_b): Ditto.
(CODE_FOR_lsx_vrepli_h): Ditto.
(CODE_FOR_lsx_vrepli_w): Ditto.
(CODE_FOR_lsx_vrepli_d): Ditto.
(CODE_FOR_lsx_vsat_b): Ditto.
(CODE_FOR_lsx_vsat_h): Ditto.
(CODE_FOR_lsx_vsat_w): Ditto.
(CODE_FOR_lsx_vsat_d): Ditto.
(CODE_FOR_lsx_vsat_bu): Ditto.
(CODE_FOR_lsx_vsat_hu): Ditto.
(CODE_FOR_lsx_vsat_wu): Ditto.
(CODE_FOR_lsx_vsat_du): Ditto.
(CODE_FOR_lsx_vavg_b): Ditto.
(CODE_FOR_lsx_vavg_h): Ditto.
(CODE_FOR_lsx_vavg_w): Ditto.
(CODE_FOR_lsx_vavg_d): Ditto.
(CODE_FOR_lsx_vavg_bu): Ditto.
(CODE_FOR_lsx_vavg_hu): Ditto.
(CODE_FOR_lsx_vavg_wu): Ditto.
(CODE_FOR_lsx_vavg_du): Ditto.
(CODE_FOR_lsx_vavgr_b): Ditto.
(CODE_FOR_lsx_vavgr_h): Ditto.
(CODE_FOR_lsx_vavgr_w): Ditto.
(CODE_FOR_lsx_vavgr_d): Ditto.
(CODE_FOR_lsx_vavgr_bu): Ditto.
(CODE_FOR_lsx_vavgr_hu): Ditto.
(CODE_FOR_lsx_vavgr_wu): Ditto.
(CODE_FOR_lsx_vavgr_du): Ditto.
(CODE_FOR_lsx_vssub_b): Ditto.
(CODE_FOR_lsx_vssub_h): Ditto.
(CODE_FOR_lsx_vssub_w): Ditto.
(CODE_FOR_lsx_vssub_d): Ditto.
(CODE_FOR_lsx_vssub_bu): Ditto.
(CODE_FOR_lsx_vssub_hu): Ditto.
(CODE_FOR_lsx_vssub_wu): Ditto.
(CODE_FOR_lsx_vssub_du): Ditto.
(CODE_FOR_lsx_vabsd_b): Ditto.
(CODE_FOR_lsx_vabsd_h): Ditto.
(CODE_FOR_lsx_vabsd_w): Ditto.
(CODE_FOR_lsx_vabsd_d): Ditto.
(CODE_FOR_lsx_vabsd_bu): Ditto.
(CODE_FOR_lsx_vabsd_hu): Ditto.
(CODE_FOR_lsx_vabsd_wu): Ditto.
(CODE_FOR_lsx_vabsd_du): Ditto.
(CODE_FOR_lsx_vftint_w_s): Ditto.
(CODE_FOR_lsx_vftint_l_d): Ditto.
(CODE_FOR_lsx_vftint_wu_s): Ditto.
(CODE_FOR_lsx_vftint_lu_d): Ditto.
(CODE_FOR_lsx_vandn_v): Ditto.
(CODE_FOR_lsx_vorn_v): Ditto.
(CODE_FOR_lsx_vneg_b): Ditto.
(CODE_FOR_lsx_vneg_h): Ditto.
(CODE_FOR_lsx_vneg_w): Ditto.
(CODE_FOR_lsx_vneg_d): Ditto.
(CODE_FOR_lsx_vshuf4i_d): Ditto.
(CODE_FOR_lsx_vbsrl_v): Ditto.
(CODE_FOR_lsx_vbsll_v): Ditto.
(CODE_FOR_lsx_vfmadd_s): Ditto.
(CODE_FOR_lsx_vfmadd_d): Ditto.
(CODE_FOR_lsx_vfmsub_s): Ditto.
(CODE_FOR_lsx_vfmsub_d): Ditto.
(CODE_FOR_lsx_vfnmadd_s): Ditto.
(CODE_FOR_lsx_vfnmadd_d): Ditto.
(CODE_FOR_lsx_vfnmsub_s): Ditto.
(CODE_FOR_lsx_vfnmsub_d): Ditto.
(CODE_FOR_lsx_vmuh_b): Ditto.
(CODE_FOR_lsx_vmuh_h): Ditto.
(CODE_FOR_lsx_vmuh_w): Ditto.
(CODE_FOR_lsx_vmuh_d): Ditto.
(CODE_FOR_lsx_vmuh_bu): Ditto.
(CODE_FOR_lsx_vmuh_hu): Ditto.
(CODE_FOR_lsx_vmuh_wu): Ditto.
(CODE_FOR_lsx_vmuh_du): Ditto.
(CODE_FOR_lsx_vsllwil_h_b): Ditto.
(CODE_FOR_lsx_vsllwil_w_h): Ditto.
(CODE_FOR_lsx_vsllwil_d_w): Ditto.
(CODE_FOR_lsx_vsllwil_hu_bu): Ditto.
(CODE_FOR_lsx_vsllwil_wu_hu): Ditto.
(CODE_FOR_lsx_vsllwil_du_wu): Ditto.
(CODE_FOR_lsx_vssran_b_h): Ditto.
(CODE_FOR_lsx_vssran_h_w): Ditto.
(CODE_FOR_lsx_vssran_w_d): Ditto.
(CODE_FOR_lsx_vssran_bu_h): Ditto.
(CODE_FOR_lsx_vssran_hu_w): Ditto.
(CODE_FOR_lsx_vssran_wu_d): Ditto.
(CODE_FOR_lsx_vssrarn_b_h): Ditto.
(CODE_FOR_lsx_vssrarn_h_w): Ditto.
(CODE_FOR_lsx_vssrarn_w_d): Ditto.
(CODE_FOR_lsx_vssrarn_bu_h): Ditto.
(CODE_FOR_lsx_vssrarn_hu_w): Ditto.
(CODE_FOR_lsx_vssrarn_wu_d): Ditto.
(CODE_FOR_lsx_vssrln_bu_h): Ditto.
(CODE_FOR_lsx_vssrln_hu_w): Ditto.
(CODE_FOR_lsx_vssrln_wu_d): Ditto.
(CODE_FOR_lsx_vssrlrn_bu_h): Ditto.
(CODE_FOR_lsx_vssrlrn_hu_w): Ditto.
(CODE_FOR_lsx_vssrlrn_wu_d): Ditto.
(loongarch_builtin_vector_type): Ditto.
(loongarch_build_cvpointer_type): Ditto.
(LARCH_ATYPE_CVPOINTER): Ditto.
(LARCH_ATYPE_BOOLEAN): Ditto.
(LARCH_ATYPE_V2SF): Ditto.
(LARCH_ATYPE_V2HI): Ditto.
(LARCH_ATYPE_V2SI): Ditto.
(LARCH_ATYPE_V4QI): Ditto.
(LARCH_ATYPE_V4HI): Ditto.
(LARCH_ATYPE_V8QI): Ditto.
(LARCH_ATYPE_V2DI): Ditto.
(LARCH_ATYPE_V4SI): Ditto.
(LARCH_ATYPE_V8HI): Ditto.
(LARCH_ATYPE_V16QI): Ditto.
(LARCH_ATYPE_V2DF): Ditto.
(LARCH_ATYPE_V4SF): Ditto.
(LARCH_ATYPE_V4DI): Ditto.
(LARCH_ATYPE_V8SI): Ditto.
(LARCH_ATYPE_V16HI): Ditto.
(LARCH_ATYPE_V32QI): Ditto.
(LARCH_ATYPE_V4DF): Ditto.
(LARCH_ATYPE_V8SF): Ditto.
(LARCH_ATYPE_UV2DI): Ditto.
(LARCH_ATYPE_UV4SI): Ditto.
(LARCH_ATYPE_UV8HI): Ditto.
(LARCH_ATYPE_UV16QI): Ditto.
(LARCH_ATYPE_UV4DI): Ditto.
(LARCH_ATYPE_UV8SI): Ditto.
(LARCH_ATYPE_UV16HI): Ditto.
(LARCH_ATYPE_UV32QI): Ditto.
(LARCH_ATYPE_UV2SI): Ditto.
(LARCH_ATYPE_UV4HI): Ditto.
(LARCH_ATYPE_UV8QI): Ditto.
(loongarch_builtin_vectorized_function): Ditto.
(LARCH_GET_BUILTIN): Ditto.
(loongarch_expand_builtin_insn): Ditto.
(loongarch_expand_builtin_lsx_test_branch): Ditto.
(loongarch_expand_builtin): Ditto.
* config/loongarch/loongarch-ftypes.def (1): Ditto.
(2): Ditto.
(3): Ditto.
(4): Ditto.
* config/loongarch/lsxintrin.h: New file.
Signed-off-by: Peng Fan <fanpeng@loongson.cn>
Signed-off-by: ticat_fp <fanpeng@loongson.cn>
---
gcc/config.gcc | 2 +-
gcc/config/loongarch/loongarch-builtins.cc | 1498 +++++-
gcc/config/loongarch/loongarch-ftypes.def | 395 +-
gcc/config/loongarch/lsxintrin.h | 5181 ++++++++++++++++++++
4 files changed, 7070 insertions(+), 6 deletions(-)
create mode 100644 gcc/config/loongarch/lsxintrin.h
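
Editor's note (not part of the upstream patch): a minimal usage sketch of the builtins this patch registers. It assumes a LoongArch64 GCC built with this change and compiled with -mlsx; the v4i32 typedef and the cross-compiler name are illustrative only — the new lsxintrin.h header supplies its own vector types and wrapper functions, which are not shown here.

/* example-lsx.c -- hedged sketch, not from the patch.
   Assumed build command: loongarch64-linux-gnu-gcc -mlsx -O2 -c example-lsx.c  */

/* Illustrative 4 x int32 vector type via GCC's generic vector extension.  */
typedef int v4i32 __attribute__ ((vector_size (16)));

v4i32
shift_each_left (v4i32 a, v4i32 amounts)
{
  /* Registered above as LSX_BUILTIN (vsll_w, LARCH_V4SI_FTYPE_V4SI_V4SI);
     per-element shift left, expanded through CODE_FOR_lsx_vsll_w
     (i.e. CODE_FOR_vashlv4si3).  */
  return __builtin_lsx_vsll_w (a, amounts);
}

v4i32
shift_each_left_by_2 (v4i32 a)
{
  /* Immediate form, LARCH_V4SI_FTYPE_V4SI_UQI; the shift amount is
     expected to be a compile-time constant.  */
  return __builtin_lsx_vslli_w (a, 2);
}
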
diff --git a/gcc/config.gcc b/gcc/config.gcc
index 61d81d8d8..4e149e0ef 100644
--- a/gcc/config.gcc
+++ b/gcc/config.gcc
@@ -456,7 +456,7 @@ mips*-*-*)
;;
loongarch*-*-*)
cpu_type=loongarch
- extra_headers="larchintrin.h"
+ extra_headers="larchintrin.h lsxintrin.h"
extra_objs="loongarch-c.o loongarch-builtins.o loongarch-cpu.o loongarch-opts.o loongarch-def.o"
extra_gcc_objs="loongarch-driver.o loongarch-cpu.o loongarch-opts.o loongarch-def.o"
extra_options="${extra_options} g.opt fused-madd.opt"
diff --git a/gcc/config/loongarch/loongarch-builtins.cc b/gcc/config/loongarch/loongarch-builtins.cc
index c8548a07f..de6428ac6 100644
--- a/gcc/config/loongarch/loongarch-builtins.cc
+++ b/gcc/config/loongarch/loongarch-builtins.cc
@@ -34,14 +34,18 @@ along with GCC; see the file COPYING3. If not see
#include "recog.h"
#include "diagnostic.h"
#include "fold-const.h"
+#include "explow.h"
#include "expr.h"
#include "langhooks.h"
#include "emit-rtl.h"
+#include "case-cfn-macros.h"
/* Macros to create an enumeration identifier for a function prototype. */
#define LARCH_FTYPE_NAME1(A, B) LARCH_##A##_FTYPE_##B
#define LARCH_FTYPE_NAME2(A, B, C) LARCH_##A##_FTYPE_##B##_##C
#define LARCH_FTYPE_NAME3(A, B, C, D) LARCH_##A##_FTYPE_##B##_##C##_##D
+#define LARCH_FTYPE_NAME4(A, B, C, D, E) \
+ LARCH_##A##_FTYPE_##B##_##C##_##D##_##E
/* Classifies the prototype of a built-in function. */
enum loongarch_function_type
@@ -64,6 +68,12 @@ enum loongarch_builtin_type
value and the arguments are mapped to operands 0 and above. */
LARCH_BUILTIN_DIRECT_NO_TARGET,
+ /* For generating LoongArch LSX. */
+ LARCH_BUILTIN_LSX,
+
+ /* The function corresponds to an LSX conditional branch instruction
+ combined with a compare instruction. */
+ LARCH_BUILTIN_LSX_TEST_BRANCH,
};
/* Declare an availability predicate for built-in functions that require
@@ -101,6 +111,7 @@ struct loongarch_builtin_description
};
AVAIL_ALL (hard_float, TARGET_HARD_FLOAT_ABI)
+AVAIL_ALL (lsx, ISA_HAS_LSX)
/* Construct a loongarch_builtin_description from the given arguments.
@@ -120,8 +131,8 @@ AVAIL_ALL (hard_float, TARGET_HARD_FLOAT_ABI)
#define LARCH_BUILTIN(INSN, NAME, BUILTIN_TYPE, FUNCTION_TYPE, AVAIL) \
{ \
CODE_FOR_loongarch_##INSN, "__builtin_loongarch_" NAME, \
- BUILTIN_TYPE, FUNCTION_TYPE, \
- loongarch_builtin_avail_##AVAIL \
+ BUILTIN_TYPE, FUNCTION_TYPE, \
+ loongarch_builtin_avail_##AVAIL \
}
/* Define __builtin_loongarch_<INSN>, which is a LARCH_BUILTIN_DIRECT function
@@ -137,6 +148,300 @@ AVAIL_ALL (hard_float, TARGET_HARD_FLOAT_ABI)
LARCH_BUILTIN (INSN, #INSN, LARCH_BUILTIN_DIRECT_NO_TARGET, \
FUNCTION_TYPE, AVAIL)
+/* Define an LSX LARCH_BUILTIN_DIRECT function __builtin_lsx_<INSN>
+ for instruction CODE_FOR_lsx_<INSN>. FUNCTION_TYPE is a builtin_description
+ field. */
+#define LSX_BUILTIN(INSN, FUNCTION_TYPE) \
+ { CODE_FOR_lsx_ ## INSN, \
+ "__builtin_lsx_" #INSN, LARCH_BUILTIN_DIRECT, \
+ FUNCTION_TYPE, loongarch_builtin_avail_lsx }
+
+
+/* Define an LSX LARCH_BUILTIN_LSX_TEST_BRANCH function __builtin_lsx_<INSN>
+ for instruction CODE_FOR_lsx_<INSN>. FUNCTION_TYPE is a builtin_description
+ field. */
+#define LSX_BUILTIN_TEST_BRANCH(INSN, FUNCTION_TYPE) \
+ { CODE_FOR_lsx_ ## INSN, \
+ "__builtin_lsx_" #INSN, LARCH_BUILTIN_LSX_TEST_BRANCH, \
+ FUNCTION_TYPE, loongarch_builtin_avail_lsx }
+
+/* Define an LSX LARCH_BUILTIN_DIRECT_NO_TARGET function __builtin_lsx_<INSN>
+ for instruction CODE_FOR_lsx_<INSN>. FUNCTION_TYPE is a builtin_description
+ field. */
+#define LSX_NO_TARGET_BUILTIN(INSN, FUNCTION_TYPE) \
+ { CODE_FOR_lsx_ ## INSN, \
+ "__builtin_lsx_" #INSN, LARCH_BUILTIN_DIRECT_NO_TARGET, \
+ FUNCTION_TYPE, loongarch_builtin_avail_lsx }
+
+/* LoongArch SX define CODE_FOR_lsx_xxx */
+#define CODE_FOR_lsx_vsadd_b CODE_FOR_ssaddv16qi3
+#define CODE_FOR_lsx_vsadd_h CODE_FOR_ssaddv8hi3
+#define CODE_FOR_lsx_vsadd_w CODE_FOR_ssaddv4si3
+#define CODE_FOR_lsx_vsadd_d CODE_FOR_ssaddv2di3
+#define CODE_FOR_lsx_vsadd_bu CODE_FOR_usaddv16qi3
+#define CODE_FOR_lsx_vsadd_hu CODE_FOR_usaddv8hi3
+#define CODE_FOR_lsx_vsadd_wu CODE_FOR_usaddv4si3
+#define CODE_FOR_lsx_vsadd_du CODE_FOR_usaddv2di3
+#define CODE_FOR_lsx_vadd_b CODE_FOR_addv16qi3
+#define CODE_FOR_lsx_vadd_h CODE_FOR_addv8hi3
+#define CODE_FOR_lsx_vadd_w CODE_FOR_addv4si3
+#define CODE_FOR_lsx_vadd_d CODE_FOR_addv2di3
+#define CODE_FOR_lsx_vaddi_bu CODE_FOR_addv16qi3
+#define CODE_FOR_lsx_vaddi_hu CODE_FOR_addv8hi3
+#define CODE_FOR_lsx_vaddi_wu CODE_FOR_addv4si3
+#define CODE_FOR_lsx_vaddi_du CODE_FOR_addv2di3
+#define CODE_FOR_lsx_vand_v CODE_FOR_andv16qi3
+#define CODE_FOR_lsx_vandi_b CODE_FOR_andv16qi3
+#define CODE_FOR_lsx_bnz_v CODE_FOR_lsx_bnz_v_b
+#define CODE_FOR_lsx_bz_v CODE_FOR_lsx_bz_v_b
+#define CODE_FOR_lsx_vbitsel_v CODE_FOR_lsx_vbitsel_b
+#define CODE_FOR_lsx_vseqi_b CODE_FOR_lsx_vseq_b
+#define CODE_FOR_lsx_vseqi_h CODE_FOR_lsx_vseq_h
+#define CODE_FOR_lsx_vseqi_w CODE_FOR_lsx_vseq_w
+#define CODE_FOR_lsx_vseqi_d CODE_FOR_lsx_vseq_d
+#define CODE_FOR_lsx_vslti_b CODE_FOR_lsx_vslt_b
+#define CODE_FOR_lsx_vslti_h CODE_FOR_lsx_vslt_h
+#define CODE_FOR_lsx_vslti_w CODE_FOR_lsx_vslt_w
+#define CODE_FOR_lsx_vslti_d CODE_FOR_lsx_vslt_d
+#define CODE_FOR_lsx_vslti_bu CODE_FOR_lsx_vslt_bu
+#define CODE_FOR_lsx_vslti_hu CODE_FOR_lsx_vslt_hu
+#define CODE_FOR_lsx_vslti_wu CODE_FOR_lsx_vslt_wu
+#define CODE_FOR_lsx_vslti_du CODE_FOR_lsx_vslt_du
+#define CODE_FOR_lsx_vslei_b CODE_FOR_lsx_vsle_b
+#define CODE_FOR_lsx_vslei_h CODE_FOR_lsx_vsle_h
+#define CODE_FOR_lsx_vslei_w CODE_FOR_lsx_vsle_w
+#define CODE_FOR_lsx_vslei_d CODE_FOR_lsx_vsle_d
+#define CODE_FOR_lsx_vslei_bu CODE_FOR_lsx_vsle_bu
+#define CODE_FOR_lsx_vslei_hu CODE_FOR_lsx_vsle_hu
+#define CODE_FOR_lsx_vslei_wu CODE_FOR_lsx_vsle_wu
+#define CODE_FOR_lsx_vslei_du CODE_FOR_lsx_vsle_du
+#define CODE_FOR_lsx_vdiv_b CODE_FOR_divv16qi3
+#define CODE_FOR_lsx_vdiv_h CODE_FOR_divv8hi3
+#define CODE_FOR_lsx_vdiv_w CODE_FOR_divv4si3
+#define CODE_FOR_lsx_vdiv_d CODE_FOR_divv2di3
+#define CODE_FOR_lsx_vdiv_bu CODE_FOR_udivv16qi3
+#define CODE_FOR_lsx_vdiv_hu CODE_FOR_udivv8hi3
+#define CODE_FOR_lsx_vdiv_wu CODE_FOR_udivv4si3
+#define CODE_FOR_lsx_vdiv_du CODE_FOR_udivv2di3
+#define CODE_FOR_lsx_vfadd_s CODE_FOR_addv4sf3
+#define CODE_FOR_lsx_vfadd_d CODE_FOR_addv2df3
+#define CODE_FOR_lsx_vftintrz_w_s CODE_FOR_fix_truncv4sfv4si2
+#define CODE_FOR_lsx_vftintrz_l_d CODE_FOR_fix_truncv2dfv2di2
+#define CODE_FOR_lsx_vftintrz_wu_s CODE_FOR_fixuns_truncv4sfv4si2
+#define CODE_FOR_lsx_vftintrz_lu_d CODE_FOR_fixuns_truncv2dfv2di2
+#define CODE_FOR_lsx_vffint_s_w CODE_FOR_floatv4siv4sf2
+#define CODE_FOR_lsx_vffint_d_l CODE_FOR_floatv2div2df2
+#define CODE_FOR_lsx_vffint_s_wu CODE_FOR_floatunsv4siv4sf2
+#define CODE_FOR_lsx_vffint_d_lu CODE_FOR_floatunsv2div2df2
+#define CODE_FOR_lsx_vfsub_s CODE_FOR_subv4sf3
+#define CODE_FOR_lsx_vfsub_d CODE_FOR_subv2df3
+#define CODE_FOR_lsx_vfmul_s CODE_FOR_mulv4sf3
+#define CODE_FOR_lsx_vfmul_d CODE_FOR_mulv2df3
+#define CODE_FOR_lsx_vfdiv_s CODE_FOR_divv4sf3
+#define CODE_FOR_lsx_vfdiv_d CODE_FOR_divv2df3
+#define CODE_FOR_lsx_vfmax_s CODE_FOR_smaxv4sf3
+#define CODE_FOR_lsx_vfmax_d CODE_FOR_smaxv2df3
+#define CODE_FOR_lsx_vfmin_s CODE_FOR_sminv4sf3
+#define CODE_FOR_lsx_vfmin_d CODE_FOR_sminv2df3
+#define CODE_FOR_lsx_vfsqrt_s CODE_FOR_sqrtv4sf2
+#define CODE_FOR_lsx_vfsqrt_d CODE_FOR_sqrtv2df2
+#define CODE_FOR_lsx_vflogb_s CODE_FOR_logbv4sf2
+#define CODE_FOR_lsx_vflogb_d CODE_FOR_logbv2df2
+#define CODE_FOR_lsx_vmax_b CODE_FOR_smaxv16qi3
+#define CODE_FOR_lsx_vmax_h CODE_FOR_smaxv8hi3
+#define CODE_FOR_lsx_vmax_w CODE_FOR_smaxv4si3
+#define CODE_FOR_lsx_vmax_d CODE_FOR_smaxv2di3
+#define CODE_FOR_lsx_vmaxi_b CODE_FOR_smaxv16qi3
+#define CODE_FOR_lsx_vmaxi_h CODE_FOR_smaxv8hi3
+#define CODE_FOR_lsx_vmaxi_w CODE_FOR_smaxv4si3
+#define CODE_FOR_lsx_vmaxi_d CODE_FOR_smaxv2di3
+#define CODE_FOR_lsx_vmax_bu CODE_FOR_umaxv16qi3
+#define CODE_FOR_lsx_vmax_hu CODE_FOR_umaxv8hi3
+#define CODE_FOR_lsx_vmax_wu CODE_FOR_umaxv4si3
+#define CODE_FOR_lsx_vmax_du CODE_FOR_umaxv2di3
+#define CODE_FOR_lsx_vmaxi_bu CODE_FOR_umaxv16qi3
+#define CODE_FOR_lsx_vmaxi_hu CODE_FOR_umaxv8hi3
+#define CODE_FOR_lsx_vmaxi_wu CODE_FOR_umaxv4si3
+#define CODE_FOR_lsx_vmaxi_du CODE_FOR_umaxv2di3
+#define CODE_FOR_lsx_vmin_b CODE_FOR_sminv16qi3
+#define CODE_FOR_lsx_vmin_h CODE_FOR_sminv8hi3
+#define CODE_FOR_lsx_vmin_w CODE_FOR_sminv4si3
+#define CODE_FOR_lsx_vmin_d CODE_FOR_sminv2di3
+#define CODE_FOR_lsx_vmini_b CODE_FOR_sminv16qi3
+#define CODE_FOR_lsx_vmini_h CODE_FOR_sminv8hi3
+#define CODE_FOR_lsx_vmini_w CODE_FOR_sminv4si3
+#define CODE_FOR_lsx_vmini_d CODE_FOR_sminv2di3
+#define CODE_FOR_lsx_vmin_bu CODE_FOR_uminv16qi3
+#define CODE_FOR_lsx_vmin_hu CODE_FOR_uminv8hi3
+#define CODE_FOR_lsx_vmin_wu CODE_FOR_uminv4si3
+#define CODE_FOR_lsx_vmin_du CODE_FOR_uminv2di3
+#define CODE_FOR_lsx_vmini_bu CODE_FOR_uminv16qi3
+#define CODE_FOR_lsx_vmini_hu CODE_FOR_uminv8hi3
+#define CODE_FOR_lsx_vmini_wu CODE_FOR_uminv4si3
+#define CODE_FOR_lsx_vmini_du CODE_FOR_uminv2di3
+#define CODE_FOR_lsx_vmod_b CODE_FOR_modv16qi3
+#define CODE_FOR_lsx_vmod_h CODE_FOR_modv8hi3
+#define CODE_FOR_lsx_vmod_w CODE_FOR_modv4si3
+#define CODE_FOR_lsx_vmod_d CODE_FOR_modv2di3
+#define CODE_FOR_lsx_vmod_bu CODE_FOR_umodv16qi3
+#define CODE_FOR_lsx_vmod_hu CODE_FOR_umodv8hi3
+#define CODE_FOR_lsx_vmod_wu CODE_FOR_umodv4si3
+#define CODE_FOR_lsx_vmod_du CODE_FOR_umodv2di3
+#define CODE_FOR_lsx_vmul_b CODE_FOR_mulv16qi3
+#define CODE_FOR_lsx_vmul_h CODE_FOR_mulv8hi3
+#define CODE_FOR_lsx_vmul_w CODE_FOR_mulv4si3
+#define CODE_FOR_lsx_vmul_d CODE_FOR_mulv2di3
+#define CODE_FOR_lsx_vclz_b CODE_FOR_clzv16qi2
+#define CODE_FOR_lsx_vclz_h CODE_FOR_clzv8hi2
+#define CODE_FOR_lsx_vclz_w CODE_FOR_clzv4si2
+#define CODE_FOR_lsx_vclz_d CODE_FOR_clzv2di2
+#define CODE_FOR_lsx_vnor_v CODE_FOR_lsx_nor_b
+#define CODE_FOR_lsx_vor_v CODE_FOR_iorv16qi3
+#define CODE_FOR_lsx_vori_b CODE_FOR_iorv16qi3
+#define CODE_FOR_lsx_vnori_b CODE_FOR_lsx_nor_b
+#define CODE_FOR_lsx_vpcnt_b CODE_FOR_popcountv16qi2
+#define CODE_FOR_lsx_vpcnt_h CODE_FOR_popcountv8hi2
+#define CODE_FOR_lsx_vpcnt_w CODE_FOR_popcountv4si2
+#define CODE_FOR_lsx_vpcnt_d CODE_FOR_popcountv2di2
+#define CODE_FOR_lsx_vxor_v CODE_FOR_xorv16qi3
+#define CODE_FOR_lsx_vxori_b CODE_FOR_xorv16qi3
+#define CODE_FOR_lsx_vsll_b CODE_FOR_vashlv16qi3
+#define CODE_FOR_lsx_vsll_h CODE_FOR_vashlv8hi3
+#define CODE_FOR_lsx_vsll_w CODE_FOR_vashlv4si3
+#define CODE_FOR_lsx_vsll_d CODE_FOR_vashlv2di3
+#define CODE_FOR_lsx_vslli_b CODE_FOR_vashlv16qi3
+#define CODE_FOR_lsx_vslli_h CODE_FOR_vashlv8hi3
+#define CODE_FOR_lsx_vslli_w CODE_FOR_vashlv4si3
+#define CODE_FOR_lsx_vslli_d CODE_FOR_vashlv2di3
+#define CODE_FOR_lsx_vsra_b CODE_FOR_vashrv16qi3
+#define CODE_FOR_lsx_vsra_h CODE_FOR_vashrv8hi3
+#define CODE_FOR_lsx_vsra_w CODE_FOR_vashrv4si3
+#define CODE_FOR_lsx_vsra_d CODE_FOR_vashrv2di3
+#define CODE_FOR_lsx_vsrai_b CODE_FOR_vashrv16qi3
+#define CODE_FOR_lsx_vsrai_h CODE_FOR_vashrv8hi3
+#define CODE_FOR_lsx_vsrai_w CODE_FOR_vashrv4si3
+#define CODE_FOR_lsx_vsrai_d CODE_FOR_vashrv2di3
+#define CODE_FOR_lsx_vsrl_b CODE_FOR_vlshrv16qi3
+#define CODE_FOR_lsx_vsrl_h CODE_FOR_vlshrv8hi3
+#define CODE_FOR_lsx_vsrl_w CODE_FOR_vlshrv4si3
+#define CODE_FOR_lsx_vsrl_d CODE_FOR_vlshrv2di3
+#define CODE_FOR_lsx_vsrli_b CODE_FOR_vlshrv16qi3
+#define CODE_FOR_lsx_vsrli_h CODE_FOR_vlshrv8hi3
+#define CODE_FOR_lsx_vsrli_w CODE_FOR_vlshrv4si3
+#define CODE_FOR_lsx_vsrli_d CODE_FOR_vlshrv2di3
+#define CODE_FOR_lsx_vsub_b CODE_FOR_subv16qi3
+#define CODE_FOR_lsx_vsub_h CODE_FOR_subv8hi3
+#define CODE_FOR_lsx_vsub_w CODE_FOR_subv4si3
+#define CODE_FOR_lsx_vsub_d CODE_FOR_subv2di3
+#define CODE_FOR_lsx_vsubi_bu CODE_FOR_subv16qi3
+#define CODE_FOR_lsx_vsubi_hu CODE_FOR_subv8hi3
+#define CODE_FOR_lsx_vsubi_wu CODE_FOR_subv4si3
+#define CODE_FOR_lsx_vsubi_du CODE_FOR_subv2di3
+
+#define CODE_FOR_lsx_vpackod_d CODE_FOR_lsx_vilvh_d
+#define CODE_FOR_lsx_vpackev_d CODE_FOR_lsx_vilvl_d
+#define CODE_FOR_lsx_vpickod_d CODE_FOR_lsx_vilvh_d
+#define CODE_FOR_lsx_vpickev_d CODE_FOR_lsx_vilvl_d
+
+#define CODE_FOR_lsx_vrepli_b CODE_FOR_lsx_vrepliv16qi
+#define CODE_FOR_lsx_vrepli_h CODE_FOR_lsx_vrepliv8hi
+#define CODE_FOR_lsx_vrepli_w CODE_FOR_lsx_vrepliv4si
+#define CODE_FOR_lsx_vrepli_d CODE_FOR_lsx_vrepliv2di
+#define CODE_FOR_lsx_vsat_b CODE_FOR_lsx_vsat_s_b
+#define CODE_FOR_lsx_vsat_h CODE_FOR_lsx_vsat_s_h
+#define CODE_FOR_lsx_vsat_w CODE_FOR_lsx_vsat_s_w
+#define CODE_FOR_lsx_vsat_d CODE_FOR_lsx_vsat_s_d
+#define CODE_FOR_lsx_vsat_bu CODE_FOR_lsx_vsat_u_bu
+#define CODE_FOR_lsx_vsat_hu CODE_FOR_lsx_vsat_u_hu
+#define CODE_FOR_lsx_vsat_wu CODE_FOR_lsx_vsat_u_wu
+#define CODE_FOR_lsx_vsat_du CODE_FOR_lsx_vsat_u_du
+#define CODE_FOR_lsx_vavg_b CODE_FOR_lsx_vavg_s_b
+#define CODE_FOR_lsx_vavg_h CODE_FOR_lsx_vavg_s_h
+#define CODE_FOR_lsx_vavg_w CODE_FOR_lsx_vavg_s_w
+#define CODE_FOR_lsx_vavg_d CODE_FOR_lsx_vavg_s_d
+#define CODE_FOR_lsx_vavg_bu CODE_FOR_lsx_vavg_u_bu
+#define CODE_FOR_lsx_vavg_hu CODE_FOR_lsx_vavg_u_hu
+#define CODE_FOR_lsx_vavg_wu CODE_FOR_lsx_vavg_u_wu
+#define CODE_FOR_lsx_vavg_du CODE_FOR_lsx_vavg_u_du
+#define CODE_FOR_lsx_vavgr_b CODE_FOR_lsx_vavgr_s_b
+#define CODE_FOR_lsx_vavgr_h CODE_FOR_lsx_vavgr_s_h
+#define CODE_FOR_lsx_vavgr_w CODE_FOR_lsx_vavgr_s_w
+#define CODE_FOR_lsx_vavgr_d CODE_FOR_lsx_vavgr_s_d
+#define CODE_FOR_lsx_vavgr_bu CODE_FOR_lsx_vavgr_u_bu
+#define CODE_FOR_lsx_vavgr_hu CODE_FOR_lsx_vavgr_u_hu
+#define CODE_FOR_lsx_vavgr_wu CODE_FOR_lsx_vavgr_u_wu
+#define CODE_FOR_lsx_vavgr_du CODE_FOR_lsx_vavgr_u_du
+#define CODE_FOR_lsx_vssub_b CODE_FOR_lsx_vssub_s_b
+#define CODE_FOR_lsx_vssub_h CODE_FOR_lsx_vssub_s_h
+#define CODE_FOR_lsx_vssub_w CODE_FOR_lsx_vssub_s_w
+#define CODE_FOR_lsx_vssub_d CODE_FOR_lsx_vssub_s_d
+#define CODE_FOR_lsx_vssub_bu CODE_FOR_lsx_vssub_u_bu
+#define CODE_FOR_lsx_vssub_hu CODE_FOR_lsx_vssub_u_hu
+#define CODE_FOR_lsx_vssub_wu CODE_FOR_lsx_vssub_u_wu
+#define CODE_FOR_lsx_vssub_du CODE_FOR_lsx_vssub_u_du
+#define CODE_FOR_lsx_vabsd_b CODE_FOR_lsx_vabsd_s_b
+#define CODE_FOR_lsx_vabsd_h CODE_FOR_lsx_vabsd_s_h
+#define CODE_FOR_lsx_vabsd_w CODE_FOR_lsx_vabsd_s_w
+#define CODE_FOR_lsx_vabsd_d CODE_FOR_lsx_vabsd_s_d
+#define CODE_FOR_lsx_vabsd_bu CODE_FOR_lsx_vabsd_u_bu
+#define CODE_FOR_lsx_vabsd_hu CODE_FOR_lsx_vabsd_u_hu
+#define CODE_FOR_lsx_vabsd_wu CODE_FOR_lsx_vabsd_u_wu
+#define CODE_FOR_lsx_vabsd_du CODE_FOR_lsx_vabsd_u_du
+#define CODE_FOR_lsx_vftint_w_s CODE_FOR_lsx_vftint_s_w_s
+#define CODE_FOR_lsx_vftint_l_d CODE_FOR_lsx_vftint_s_l_d
+#define CODE_FOR_lsx_vftint_wu_s CODE_FOR_lsx_vftint_u_wu_s
+#define CODE_FOR_lsx_vftint_lu_d CODE_FOR_lsx_vftint_u_lu_d
+#define CODE_FOR_lsx_vandn_v CODE_FOR_vandnv16qi3
+#define CODE_FOR_lsx_vorn_v CODE_FOR_vornv16qi3
+#define CODE_FOR_lsx_vneg_b CODE_FOR_vnegv16qi2
+#define CODE_FOR_lsx_vneg_h CODE_FOR_vnegv8hi2
+#define CODE_FOR_lsx_vneg_w CODE_FOR_vnegv4si2
+#define CODE_FOR_lsx_vneg_d CODE_FOR_vnegv2di2
+#define CODE_FOR_lsx_vshuf4i_d CODE_FOR_lsx_vshuf4i_d
+#define CODE_FOR_lsx_vbsrl_v CODE_FOR_lsx_vbsrl_b
+#define CODE_FOR_lsx_vbsll_v CODE_FOR_lsx_vbsll_b
+#define CODE_FOR_lsx_vfmadd_s CODE_FOR_fmav4sf4
+#define CODE_FOR_lsx_vfmadd_d CODE_FOR_fmav2df4
+#define CODE_FOR_lsx_vfmsub_s CODE_FOR_fmsv4sf4
+#define CODE_FOR_lsx_vfmsub_d CODE_FOR_fmsv2df4
+#define CODE_FOR_lsx_vfnmadd_s CODE_FOR_vfnmaddv4sf4_nmadd4
+#define CODE_FOR_lsx_vfnmadd_d CODE_FOR_vfnmaddv2df4_nmadd4
+#define CODE_FOR_lsx_vfnmsub_s CODE_FOR_vfnmsubv4sf4_nmsub4
+#define CODE_FOR_lsx_vfnmsub_d CODE_FOR_vfnmsubv2df4_nmsub4
+
+#define CODE_FOR_lsx_vmuh_b CODE_FOR_lsx_vmuh_s_b
+#define CODE_FOR_lsx_vmuh_h CODE_FOR_lsx_vmuh_s_h
+#define CODE_FOR_lsx_vmuh_w CODE_FOR_lsx_vmuh_s_w
+#define CODE_FOR_lsx_vmuh_d CODE_FOR_lsx_vmuh_s_d
+#define CODE_FOR_lsx_vmuh_bu CODE_FOR_lsx_vmuh_u_bu
+#define CODE_FOR_lsx_vmuh_hu CODE_FOR_lsx_vmuh_u_hu
+#define CODE_FOR_lsx_vmuh_wu CODE_FOR_lsx_vmuh_u_wu
+#define CODE_FOR_lsx_vmuh_du CODE_FOR_lsx_vmuh_u_du
+#define CODE_FOR_lsx_vsllwil_h_b CODE_FOR_lsx_vsllwil_s_h_b
+#define CODE_FOR_lsx_vsllwil_w_h CODE_FOR_lsx_vsllwil_s_w_h
+#define CODE_FOR_lsx_vsllwil_d_w CODE_FOR_lsx_vsllwil_s_d_w
+#define CODE_FOR_lsx_vsllwil_hu_bu CODE_FOR_lsx_vsllwil_u_hu_bu
+#define CODE_FOR_lsx_vsllwil_wu_hu CODE_FOR_lsx_vsllwil_u_wu_hu
+#define CODE_FOR_lsx_vsllwil_du_wu CODE_FOR_lsx_vsllwil_u_du_wu
+#define CODE_FOR_lsx_vssran_b_h CODE_FOR_lsx_vssran_s_b_h
+#define CODE_FOR_lsx_vssran_h_w CODE_FOR_lsx_vssran_s_h_w
+#define CODE_FOR_lsx_vssran_w_d CODE_FOR_lsx_vssran_s_w_d
+#define CODE_FOR_lsx_vssran_bu_h CODE_FOR_lsx_vssran_u_bu_h
+#define CODE_FOR_lsx_vssran_hu_w CODE_FOR_lsx_vssran_u_hu_w
+#define CODE_FOR_lsx_vssran_wu_d CODE_FOR_lsx_vssran_u_wu_d
+#define CODE_FOR_lsx_vssrarn_b_h CODE_FOR_lsx_vssrarn_s_b_h
+#define CODE_FOR_lsx_vssrarn_h_w CODE_FOR_lsx_vssrarn_s_h_w
+#define CODE_FOR_lsx_vssrarn_w_d CODE_FOR_lsx_vssrarn_s_w_d
+#define CODE_FOR_lsx_vssrarn_bu_h CODE_FOR_lsx_vssrarn_u_bu_h
+#define CODE_FOR_lsx_vssrarn_hu_w CODE_FOR_lsx_vssrarn_u_hu_w
+#define CODE_FOR_lsx_vssrarn_wu_d CODE_FOR_lsx_vssrarn_u_wu_d
+#define CODE_FOR_lsx_vssrln_bu_h CODE_FOR_lsx_vssrln_u_bu_h
+#define CODE_FOR_lsx_vssrln_hu_w CODE_FOR_lsx_vssrln_u_hu_w
+#define CODE_FOR_lsx_vssrln_wu_d CODE_FOR_lsx_vssrln_u_wu_d
+#define CODE_FOR_lsx_vssrlrn_bu_h CODE_FOR_lsx_vssrlrn_u_bu_h
+#define CODE_FOR_lsx_vssrlrn_hu_w CODE_FOR_lsx_vssrlrn_u_hu_w
+#define CODE_FOR_lsx_vssrlrn_wu_d CODE_FOR_lsx_vssrlrn_u_wu_d
+
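These CODE_FOR_lsx_* aliases are needed because a builtin's user-visible name does not always match the insn pattern that implements it: the description macros below paste the builtin name onto the token CODE_FOR_lsx_, and the #defines redirect that token to the real pattern (the explicit signed/unsigned or saturating variants, or a generic standard-name pattern). A minimal sketch of the mechanism, using a hypothetical stand-in for the real description macro:

/* Hypothetical stand-in, for illustration only; the real LSX_BUILTIN macro
   used below is defined earlier in loongarch-builtins.cc.  */
#define SKETCH_LSX_BUILTIN(INSN, FTYPE)					\
  { CODE_FOR_lsx_ ## INSN,	/* rewritten by the aliases above */	\
    "__builtin_lsx_" #INSN, FTYPE }

/* After preprocessing, CODE_FOR_lsx_vssub_b becomes CODE_FOR_lsx_vssub_s_b
   (the signed saturating subtract pattern), while the user-visible name
   stays "__builtin_lsx_vssub_b".  */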
static const struct loongarch_builtin_description loongarch_builtins[] = {
#define LARCH_MOVFCSR2GR 0
DIRECT_BUILTIN (movfcsr2gr, LARCH_USI_FTYPE_UQI, hard_float),
@@ -184,6 +489,727 @@ static const struct loongarch_builtin_description loongarch_builtins[] = {
DIRECT_NO_TARGET_BUILTIN (asrtgt_d, LARCH_VOID_FTYPE_DI_DI, default),
DIRECT_NO_TARGET_BUILTIN (syscall, LARCH_VOID_FTYPE_USI, default),
DIRECT_NO_TARGET_BUILTIN (break, LARCH_VOID_FTYPE_USI, default),
+
+ /* Built-in functions for LSX. */
+ LSX_BUILTIN (vsll_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vsll_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vsll_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vsll_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vslli_b, LARCH_V16QI_FTYPE_V16QI_UQI),
+ LSX_BUILTIN (vslli_h, LARCH_V8HI_FTYPE_V8HI_UQI),
+ LSX_BUILTIN (vslli_w, LARCH_V4SI_FTYPE_V4SI_UQI),
+ LSX_BUILTIN (vslli_d, LARCH_V2DI_FTYPE_V2DI_UQI),
+ LSX_BUILTIN (vsra_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vsra_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vsra_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vsra_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vsrai_b, LARCH_V16QI_FTYPE_V16QI_UQI),
+ LSX_BUILTIN (vsrai_h, LARCH_V8HI_FTYPE_V8HI_UQI),
+ LSX_BUILTIN (vsrai_w, LARCH_V4SI_FTYPE_V4SI_UQI),
+ LSX_BUILTIN (vsrai_d, LARCH_V2DI_FTYPE_V2DI_UQI),
+ LSX_BUILTIN (vsrar_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vsrar_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vsrar_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vsrar_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vsrari_b, LARCH_V16QI_FTYPE_V16QI_UQI),
+ LSX_BUILTIN (vsrari_h, LARCH_V8HI_FTYPE_V8HI_UQI),
+ LSX_BUILTIN (vsrari_w, LARCH_V4SI_FTYPE_V4SI_UQI),
+ LSX_BUILTIN (vsrari_d, LARCH_V2DI_FTYPE_V2DI_UQI),
+ LSX_BUILTIN (vsrl_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vsrl_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vsrl_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vsrl_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vsrli_b, LARCH_V16QI_FTYPE_V16QI_UQI),
+ LSX_BUILTIN (vsrli_h, LARCH_V8HI_FTYPE_V8HI_UQI),
+ LSX_BUILTIN (vsrli_w, LARCH_V4SI_FTYPE_V4SI_UQI),
+ LSX_BUILTIN (vsrli_d, LARCH_V2DI_FTYPE_V2DI_UQI),
+ LSX_BUILTIN (vsrlr_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vsrlr_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vsrlr_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vsrlr_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vsrlri_b, LARCH_V16QI_FTYPE_V16QI_UQI),
+ LSX_BUILTIN (vsrlri_h, LARCH_V8HI_FTYPE_V8HI_UQI),
+ LSX_BUILTIN (vsrlri_w, LARCH_V4SI_FTYPE_V4SI_UQI),
+ LSX_BUILTIN (vsrlri_d, LARCH_V2DI_FTYPE_V2DI_UQI),
+ LSX_BUILTIN (vbitclr_b, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vbitclr_h, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vbitclr_w, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vbitclr_d, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vbitclri_b, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+ LSX_BUILTIN (vbitclri_h, LARCH_UV8HI_FTYPE_UV8HI_UQI),
+ LSX_BUILTIN (vbitclri_w, LARCH_UV4SI_FTYPE_UV4SI_UQI),
+ LSX_BUILTIN (vbitclri_d, LARCH_UV2DI_FTYPE_UV2DI_UQI),
+ LSX_BUILTIN (vbitset_b, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vbitset_h, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vbitset_w, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vbitset_d, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vbitseti_b, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+ LSX_BUILTIN (vbitseti_h, LARCH_UV8HI_FTYPE_UV8HI_UQI),
+ LSX_BUILTIN (vbitseti_w, LARCH_UV4SI_FTYPE_UV4SI_UQI),
+ LSX_BUILTIN (vbitseti_d, LARCH_UV2DI_FTYPE_UV2DI_UQI),
+ LSX_BUILTIN (vbitrev_b, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vbitrev_h, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vbitrev_w, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vbitrev_d, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vbitrevi_b, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+ LSX_BUILTIN (vbitrevi_h, LARCH_UV8HI_FTYPE_UV8HI_UQI),
+ LSX_BUILTIN (vbitrevi_w, LARCH_UV4SI_FTYPE_UV4SI_UQI),
+ LSX_BUILTIN (vbitrevi_d, LARCH_UV2DI_FTYPE_UV2DI_UQI),
+ LSX_BUILTIN (vadd_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vadd_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vadd_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vadd_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vaddi_bu, LARCH_V16QI_FTYPE_V16QI_UQI),
+ LSX_BUILTIN (vaddi_hu, LARCH_V8HI_FTYPE_V8HI_UQI),
+ LSX_BUILTIN (vaddi_wu, LARCH_V4SI_FTYPE_V4SI_UQI),
+ LSX_BUILTIN (vaddi_du, LARCH_V2DI_FTYPE_V2DI_UQI),
+ LSX_BUILTIN (vsub_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vsub_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vsub_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vsub_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vsubi_bu, LARCH_V16QI_FTYPE_V16QI_UQI),
+ LSX_BUILTIN (vsubi_hu, LARCH_V8HI_FTYPE_V8HI_UQI),
+ LSX_BUILTIN (vsubi_wu, LARCH_V4SI_FTYPE_V4SI_UQI),
+ LSX_BUILTIN (vsubi_du, LARCH_V2DI_FTYPE_V2DI_UQI),
+ LSX_BUILTIN (vmax_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vmax_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vmax_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vmax_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vmaxi_b, LARCH_V16QI_FTYPE_V16QI_QI),
+ LSX_BUILTIN (vmaxi_h, LARCH_V8HI_FTYPE_V8HI_QI),
+ LSX_BUILTIN (vmaxi_w, LARCH_V4SI_FTYPE_V4SI_QI),
+ LSX_BUILTIN (vmaxi_d, LARCH_V2DI_FTYPE_V2DI_QI),
+ LSX_BUILTIN (vmax_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vmax_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vmax_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vmax_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vmaxi_bu, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+ LSX_BUILTIN (vmaxi_hu, LARCH_UV8HI_FTYPE_UV8HI_UQI),
+ LSX_BUILTIN (vmaxi_wu, LARCH_UV4SI_FTYPE_UV4SI_UQI),
+ LSX_BUILTIN (vmaxi_du, LARCH_UV2DI_FTYPE_UV2DI_UQI),
+ LSX_BUILTIN (vmin_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vmin_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vmin_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vmin_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vmini_b, LARCH_V16QI_FTYPE_V16QI_QI),
+ LSX_BUILTIN (vmini_h, LARCH_V8HI_FTYPE_V8HI_QI),
+ LSX_BUILTIN (vmini_w, LARCH_V4SI_FTYPE_V4SI_QI),
+ LSX_BUILTIN (vmini_d, LARCH_V2DI_FTYPE_V2DI_QI),
+ LSX_BUILTIN (vmin_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vmin_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vmin_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vmin_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vmini_bu, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+ LSX_BUILTIN (vmini_hu, LARCH_UV8HI_FTYPE_UV8HI_UQI),
+ LSX_BUILTIN (vmini_wu, LARCH_UV4SI_FTYPE_UV4SI_UQI),
+ LSX_BUILTIN (vmini_du, LARCH_UV2DI_FTYPE_UV2DI_UQI),
+ LSX_BUILTIN (vseq_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vseq_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vseq_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vseq_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vseqi_b, LARCH_V16QI_FTYPE_V16QI_QI),
+ LSX_BUILTIN (vseqi_h, LARCH_V8HI_FTYPE_V8HI_QI),
+ LSX_BUILTIN (vseqi_w, LARCH_V4SI_FTYPE_V4SI_QI),
+ LSX_BUILTIN (vseqi_d, LARCH_V2DI_FTYPE_V2DI_QI),
+ LSX_BUILTIN (vslti_b, LARCH_V16QI_FTYPE_V16QI_QI),
+ LSX_BUILTIN (vslt_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vslt_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vslt_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vslt_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vslti_h, LARCH_V8HI_FTYPE_V8HI_QI),
+ LSX_BUILTIN (vslti_w, LARCH_V4SI_FTYPE_V4SI_QI),
+ LSX_BUILTIN (vslti_d, LARCH_V2DI_FTYPE_V2DI_QI),
+ LSX_BUILTIN (vslt_bu, LARCH_V16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vslt_hu, LARCH_V8HI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vslt_wu, LARCH_V4SI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vslt_du, LARCH_V2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vslti_bu, LARCH_V16QI_FTYPE_UV16QI_UQI),
+ LSX_BUILTIN (vslti_hu, LARCH_V8HI_FTYPE_UV8HI_UQI),
+ LSX_BUILTIN (vslti_wu, LARCH_V4SI_FTYPE_UV4SI_UQI),
+ LSX_BUILTIN (vslti_du, LARCH_V2DI_FTYPE_UV2DI_UQI),
+ LSX_BUILTIN (vsle_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vsle_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vsle_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vsle_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vslei_b, LARCH_V16QI_FTYPE_V16QI_QI),
+ LSX_BUILTIN (vslei_h, LARCH_V8HI_FTYPE_V8HI_QI),
+ LSX_BUILTIN (vslei_w, LARCH_V4SI_FTYPE_V4SI_QI),
+ LSX_BUILTIN (vslei_d, LARCH_V2DI_FTYPE_V2DI_QI),
+ LSX_BUILTIN (vsle_bu, LARCH_V16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vsle_hu, LARCH_V8HI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vsle_wu, LARCH_V4SI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vsle_du, LARCH_V2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vslei_bu, LARCH_V16QI_FTYPE_UV16QI_UQI),
+ LSX_BUILTIN (vslei_hu, LARCH_V8HI_FTYPE_UV8HI_UQI),
+ LSX_BUILTIN (vslei_wu, LARCH_V4SI_FTYPE_UV4SI_UQI),
+ LSX_BUILTIN (vslei_du, LARCH_V2DI_FTYPE_UV2DI_UQI),
+ LSX_BUILTIN (vsat_b, LARCH_V16QI_FTYPE_V16QI_UQI),
+ LSX_BUILTIN (vsat_h, LARCH_V8HI_FTYPE_V8HI_UQI),
+ LSX_BUILTIN (vsat_w, LARCH_V4SI_FTYPE_V4SI_UQI),
+ LSX_BUILTIN (vsat_d, LARCH_V2DI_FTYPE_V2DI_UQI),
+ LSX_BUILTIN (vsat_bu, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+ LSX_BUILTIN (vsat_hu, LARCH_UV8HI_FTYPE_UV8HI_UQI),
+ LSX_BUILTIN (vsat_wu, LARCH_UV4SI_FTYPE_UV4SI_UQI),
+ LSX_BUILTIN (vsat_du, LARCH_UV2DI_FTYPE_UV2DI_UQI),
+ LSX_BUILTIN (vadda_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vadda_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vadda_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vadda_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vsadd_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vsadd_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vsadd_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vsadd_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vsadd_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vsadd_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vsadd_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vsadd_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vavg_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vavg_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vavg_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vavg_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vavg_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vavg_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vavg_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vavg_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vavgr_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vavgr_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vavgr_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vavgr_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vavgr_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vavgr_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vavgr_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vavgr_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vssub_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vssub_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vssub_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vssub_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vssub_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vssub_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vssub_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vssub_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vabsd_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vabsd_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vabsd_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vabsd_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vabsd_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vabsd_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vabsd_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vabsd_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vmul_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vmul_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vmul_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vmul_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vmadd_b, LARCH_V16QI_FTYPE_V16QI_V16QI_V16QI),
+ LSX_BUILTIN (vmadd_h, LARCH_V8HI_FTYPE_V8HI_V8HI_V8HI),
+ LSX_BUILTIN (vmadd_w, LARCH_V4SI_FTYPE_V4SI_V4SI_V4SI),
+ LSX_BUILTIN (vmadd_d, LARCH_V2DI_FTYPE_V2DI_V2DI_V2DI),
+ LSX_BUILTIN (vmsub_b, LARCH_V16QI_FTYPE_V16QI_V16QI_V16QI),
+ LSX_BUILTIN (vmsub_h, LARCH_V8HI_FTYPE_V8HI_V8HI_V8HI),
+ LSX_BUILTIN (vmsub_w, LARCH_V4SI_FTYPE_V4SI_V4SI_V4SI),
+ LSX_BUILTIN (vmsub_d, LARCH_V2DI_FTYPE_V2DI_V2DI_V2DI),
+ LSX_BUILTIN (vdiv_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vdiv_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vdiv_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vdiv_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vdiv_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vdiv_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vdiv_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vdiv_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vhaddw_h_b, LARCH_V8HI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vhaddw_w_h, LARCH_V4SI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vhaddw_d_w, LARCH_V2DI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vhaddw_hu_bu, LARCH_UV8HI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vhaddw_wu_hu, LARCH_UV4SI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vhaddw_du_wu, LARCH_UV2DI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vhsubw_h_b, LARCH_V8HI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vhsubw_w_h, LARCH_V4SI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vhsubw_d_w, LARCH_V2DI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vhsubw_hu_bu, LARCH_V8HI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vhsubw_wu_hu, LARCH_V4SI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vhsubw_du_wu, LARCH_V2DI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vmod_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vmod_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vmod_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vmod_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vmod_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vmod_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vmod_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vmod_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vreplve_b, LARCH_V16QI_FTYPE_V16QI_SI),
+ LSX_BUILTIN (vreplve_h, LARCH_V8HI_FTYPE_V8HI_SI),
+ LSX_BUILTIN (vreplve_w, LARCH_V4SI_FTYPE_V4SI_SI),
+ LSX_BUILTIN (vreplve_d, LARCH_V2DI_FTYPE_V2DI_SI),
+ LSX_BUILTIN (vreplvei_b, LARCH_V16QI_FTYPE_V16QI_UQI),
+ LSX_BUILTIN (vreplvei_h, LARCH_V8HI_FTYPE_V8HI_UQI),
+ LSX_BUILTIN (vreplvei_w, LARCH_V4SI_FTYPE_V4SI_UQI),
+ LSX_BUILTIN (vreplvei_d, LARCH_V2DI_FTYPE_V2DI_UQI),
+ LSX_BUILTIN (vpickev_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vpickev_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vpickev_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vpickev_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vpickod_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vpickod_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vpickod_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vpickod_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vilvh_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vilvh_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vilvh_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vilvh_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vilvl_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vilvl_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vilvl_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vilvl_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vpackev_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vpackev_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vpackev_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vpackev_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vpackod_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vpackod_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vpackod_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vpackod_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vshuf_h, LARCH_V8HI_FTYPE_V8HI_V8HI_V8HI),
+ LSX_BUILTIN (vshuf_w, LARCH_V4SI_FTYPE_V4SI_V4SI_V4SI),
+ LSX_BUILTIN (vshuf_d, LARCH_V2DI_FTYPE_V2DI_V2DI_V2DI),
+ LSX_BUILTIN (vand_v, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vandi_b, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+ LSX_BUILTIN (vor_v, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vori_b, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+ LSX_BUILTIN (vnor_v, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vnori_b, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+ LSX_BUILTIN (vxor_v, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vxori_b, LARCH_UV16QI_FTYPE_UV16QI_UQI),
+ LSX_BUILTIN (vbitsel_v, LARCH_UV16QI_FTYPE_UV16QI_UV16QI_UV16QI),
+ LSX_BUILTIN (vbitseli_b, LARCH_UV16QI_FTYPE_UV16QI_UV16QI_USI),
+ LSX_BUILTIN (vshuf4i_b, LARCH_V16QI_FTYPE_V16QI_USI),
+ LSX_BUILTIN (vshuf4i_h, LARCH_V8HI_FTYPE_V8HI_USI),
+ LSX_BUILTIN (vshuf4i_w, LARCH_V4SI_FTYPE_V4SI_USI),
+ LSX_BUILTIN (vreplgr2vr_b, LARCH_V16QI_FTYPE_SI),
+ LSX_BUILTIN (vreplgr2vr_h, LARCH_V8HI_FTYPE_SI),
+ LSX_BUILTIN (vreplgr2vr_w, LARCH_V4SI_FTYPE_SI),
+ LSX_BUILTIN (vreplgr2vr_d, LARCH_V2DI_FTYPE_DI),
+ LSX_BUILTIN (vpcnt_b, LARCH_V16QI_FTYPE_V16QI),
+ LSX_BUILTIN (vpcnt_h, LARCH_V8HI_FTYPE_V8HI),
+ LSX_BUILTIN (vpcnt_w, LARCH_V4SI_FTYPE_V4SI),
+ LSX_BUILTIN (vpcnt_d, LARCH_V2DI_FTYPE_V2DI),
+ LSX_BUILTIN (vclo_b, LARCH_V16QI_FTYPE_V16QI),
+ LSX_BUILTIN (vclo_h, LARCH_V8HI_FTYPE_V8HI),
+ LSX_BUILTIN (vclo_w, LARCH_V4SI_FTYPE_V4SI),
+ LSX_BUILTIN (vclo_d, LARCH_V2DI_FTYPE_V2DI),
+ LSX_BUILTIN (vclz_b, LARCH_V16QI_FTYPE_V16QI),
+ LSX_BUILTIN (vclz_h, LARCH_V8HI_FTYPE_V8HI),
+ LSX_BUILTIN (vclz_w, LARCH_V4SI_FTYPE_V4SI),
+ LSX_BUILTIN (vclz_d, LARCH_V2DI_FTYPE_V2DI),
+ LSX_BUILTIN (vpickve2gr_b, LARCH_SI_FTYPE_V16QI_UQI),
+ LSX_BUILTIN (vpickve2gr_h, LARCH_SI_FTYPE_V8HI_UQI),
+ LSX_BUILTIN (vpickve2gr_w, LARCH_SI_FTYPE_V4SI_UQI),
+ LSX_BUILTIN (vpickve2gr_d, LARCH_DI_FTYPE_V2DI_UQI),
+ LSX_BUILTIN (vpickve2gr_bu, LARCH_USI_FTYPE_V16QI_UQI),
+ LSX_BUILTIN (vpickve2gr_hu, LARCH_USI_FTYPE_V8HI_UQI),
+ LSX_BUILTIN (vpickve2gr_wu, LARCH_USI_FTYPE_V4SI_UQI),
+ LSX_BUILTIN (vpickve2gr_du, LARCH_UDI_FTYPE_V2DI_UQI),
+ LSX_BUILTIN (vinsgr2vr_b, LARCH_V16QI_FTYPE_V16QI_SI_UQI),
+ LSX_BUILTIN (vinsgr2vr_h, LARCH_V8HI_FTYPE_V8HI_SI_UQI),
+ LSX_BUILTIN (vinsgr2vr_w, LARCH_V4SI_FTYPE_V4SI_SI_UQI),
+ LSX_BUILTIN (vinsgr2vr_d, LARCH_V2DI_FTYPE_V2DI_DI_UQI),
+ LSX_BUILTIN_TEST_BRANCH (bnz_b, LARCH_SI_FTYPE_UV16QI),
+ LSX_BUILTIN_TEST_BRANCH (bnz_h, LARCH_SI_FTYPE_UV8HI),
+ LSX_BUILTIN_TEST_BRANCH (bnz_w, LARCH_SI_FTYPE_UV4SI),
+ LSX_BUILTIN_TEST_BRANCH (bnz_d, LARCH_SI_FTYPE_UV2DI),
+ LSX_BUILTIN_TEST_BRANCH (bz_b, LARCH_SI_FTYPE_UV16QI),
+ LSX_BUILTIN_TEST_BRANCH (bz_h, LARCH_SI_FTYPE_UV8HI),
+ LSX_BUILTIN_TEST_BRANCH (bz_w, LARCH_SI_FTYPE_UV4SI),
+ LSX_BUILTIN_TEST_BRANCH (bz_d, LARCH_SI_FTYPE_UV2DI),
+ LSX_BUILTIN_TEST_BRANCH (bz_v, LARCH_SI_FTYPE_UV16QI),
+ LSX_BUILTIN_TEST_BRANCH (bnz_v, LARCH_SI_FTYPE_UV16QI),
+ LSX_BUILTIN (vrepli_b, LARCH_V16QI_FTYPE_HI),
+ LSX_BUILTIN (vrepli_h, LARCH_V8HI_FTYPE_HI),
+ LSX_BUILTIN (vrepli_w, LARCH_V4SI_FTYPE_HI),
+ LSX_BUILTIN (vrepli_d, LARCH_V2DI_FTYPE_HI),
+ LSX_BUILTIN (vfcmp_caf_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_caf_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_cor_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_cor_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_cun_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_cun_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_cune_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_cune_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_cueq_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_cueq_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_ceq_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_ceq_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_cne_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_cne_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_clt_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_clt_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_cult_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_cult_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_cle_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_cle_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_cule_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_cule_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_saf_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_saf_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_sor_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_sor_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_sun_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_sun_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_sune_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_sune_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_sueq_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_sueq_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_seq_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_seq_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_sne_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_sne_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_slt_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_slt_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_sult_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_sult_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_sle_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_sle_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcmp_sule_s, LARCH_V4SI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcmp_sule_d, LARCH_V2DI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfadd_s, LARCH_V4SF_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfadd_d, LARCH_V2DF_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfsub_s, LARCH_V4SF_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfsub_d, LARCH_V2DF_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfmul_s, LARCH_V4SF_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfmul_d, LARCH_V2DF_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfdiv_s, LARCH_V4SF_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfdiv_d, LARCH_V2DF_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfcvt_h_s, LARCH_V8HI_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfcvt_s_d, LARCH_V4SF_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfmin_s, LARCH_V4SF_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfmin_d, LARCH_V2DF_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfmina_s, LARCH_V4SF_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfmina_d, LARCH_V2DF_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfmax_s, LARCH_V4SF_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfmax_d, LARCH_V2DF_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfmaxa_s, LARCH_V4SF_FTYPE_V4SF_V4SF),
+ LSX_BUILTIN (vfmaxa_d, LARCH_V2DF_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vfclass_s, LARCH_V4SI_FTYPE_V4SF),
+ LSX_BUILTIN (vfclass_d, LARCH_V2DI_FTYPE_V2DF),
+ LSX_BUILTIN (vfsqrt_s, LARCH_V4SF_FTYPE_V4SF),
+ LSX_BUILTIN (vfsqrt_d, LARCH_V2DF_FTYPE_V2DF),
+ LSX_BUILTIN (vfrecip_s, LARCH_V4SF_FTYPE_V4SF),
+ LSX_BUILTIN (vfrecip_d, LARCH_V2DF_FTYPE_V2DF),
+ LSX_BUILTIN (vfrint_s, LARCH_V4SF_FTYPE_V4SF),
+ LSX_BUILTIN (vfrint_d, LARCH_V2DF_FTYPE_V2DF),
+ LSX_BUILTIN (vfrsqrt_s, LARCH_V4SF_FTYPE_V4SF),
+ LSX_BUILTIN (vfrsqrt_d, LARCH_V2DF_FTYPE_V2DF),
+ LSX_BUILTIN (vflogb_s, LARCH_V4SF_FTYPE_V4SF),
+ LSX_BUILTIN (vflogb_d, LARCH_V2DF_FTYPE_V2DF),
+ LSX_BUILTIN (vfcvth_s_h, LARCH_V4SF_FTYPE_V8HI),
+ LSX_BUILTIN (vfcvth_d_s, LARCH_V2DF_FTYPE_V4SF),
+ LSX_BUILTIN (vfcvtl_s_h, LARCH_V4SF_FTYPE_V8HI),
+ LSX_BUILTIN (vfcvtl_d_s, LARCH_V2DF_FTYPE_V4SF),
+ LSX_BUILTIN (vftint_w_s, LARCH_V4SI_FTYPE_V4SF),
+ LSX_BUILTIN (vftint_l_d, LARCH_V2DI_FTYPE_V2DF),
+ LSX_BUILTIN (vftint_wu_s, LARCH_UV4SI_FTYPE_V4SF),
+ LSX_BUILTIN (vftint_lu_d, LARCH_UV2DI_FTYPE_V2DF),
+ LSX_BUILTIN (vftintrz_w_s, LARCH_V4SI_FTYPE_V4SF),
+ LSX_BUILTIN (vftintrz_l_d, LARCH_V2DI_FTYPE_V2DF),
+ LSX_BUILTIN (vftintrz_wu_s, LARCH_UV4SI_FTYPE_V4SF),
+ LSX_BUILTIN (vftintrz_lu_d, LARCH_UV2DI_FTYPE_V2DF),
+ LSX_BUILTIN (vffint_s_w, LARCH_V4SF_FTYPE_V4SI),
+ LSX_BUILTIN (vffint_d_l, LARCH_V2DF_FTYPE_V2DI),
+ LSX_BUILTIN (vffint_s_wu, LARCH_V4SF_FTYPE_UV4SI),
+ LSX_BUILTIN (vffint_d_lu, LARCH_V2DF_FTYPE_UV2DI),
+
+ LSX_BUILTIN (vandn_v, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vneg_b, LARCH_V16QI_FTYPE_V16QI),
+ LSX_BUILTIN (vneg_h, LARCH_V8HI_FTYPE_V8HI),
+ LSX_BUILTIN (vneg_w, LARCH_V4SI_FTYPE_V4SI),
+ LSX_BUILTIN (vneg_d, LARCH_V2DI_FTYPE_V2DI),
+ LSX_BUILTIN (vmuh_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vmuh_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vmuh_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vmuh_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vmuh_bu, LARCH_UV16QI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vmuh_hu, LARCH_UV8HI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vmuh_wu, LARCH_UV4SI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vmuh_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vsllwil_h_b, LARCH_V8HI_FTYPE_V16QI_UQI),
+ LSX_BUILTIN (vsllwil_w_h, LARCH_V4SI_FTYPE_V8HI_UQI),
+ LSX_BUILTIN (vsllwil_d_w, LARCH_V2DI_FTYPE_V4SI_UQI),
+ LSX_BUILTIN (vsllwil_hu_bu, LARCH_UV8HI_FTYPE_UV16QI_UQI),
+ LSX_BUILTIN (vsllwil_wu_hu, LARCH_UV4SI_FTYPE_UV8HI_UQI),
+ LSX_BUILTIN (vsllwil_du_wu, LARCH_UV2DI_FTYPE_UV4SI_UQI),
+ LSX_BUILTIN (vsran_b_h, LARCH_V16QI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vsran_h_w, LARCH_V8HI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vsran_w_d, LARCH_V4SI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vssran_b_h, LARCH_V16QI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vssran_h_w, LARCH_V8HI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vssran_w_d, LARCH_V4SI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vssran_bu_h, LARCH_UV16QI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vssran_hu_w, LARCH_UV8HI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vssran_wu_d, LARCH_UV4SI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vsrarn_b_h, LARCH_V16QI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vsrarn_h_w, LARCH_V8HI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vsrarn_w_d, LARCH_V4SI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vssrarn_b_h, LARCH_V16QI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vssrarn_h_w, LARCH_V8HI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vssrarn_w_d, LARCH_V4SI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vssrarn_bu_h, LARCH_UV16QI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vssrarn_hu_w, LARCH_UV8HI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vssrarn_wu_d, LARCH_UV4SI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vsrln_b_h, LARCH_V16QI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vsrln_h_w, LARCH_V8HI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vsrln_w_d, LARCH_V4SI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vssrln_bu_h, LARCH_UV16QI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vssrln_hu_w, LARCH_UV8HI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vssrln_wu_d, LARCH_UV4SI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vsrlrn_b_h, LARCH_V16QI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vsrlrn_h_w, LARCH_V8HI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vsrlrn_w_d, LARCH_V4SI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vssrlrn_bu_h, LARCH_UV16QI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vssrlrn_hu_w, LARCH_UV8HI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vssrlrn_wu_d, LARCH_UV4SI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vfrstpi_b, LARCH_V16QI_FTYPE_V16QI_V16QI_UQI),
+ LSX_BUILTIN (vfrstpi_h, LARCH_V8HI_FTYPE_V8HI_V8HI_UQI),
+ LSX_BUILTIN (vfrstp_b, LARCH_V16QI_FTYPE_V16QI_V16QI_V16QI),
+ LSX_BUILTIN (vfrstp_h, LARCH_V8HI_FTYPE_V8HI_V8HI_V8HI),
+ LSX_BUILTIN (vshuf4i_d, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+ LSX_BUILTIN (vbsrl_v, LARCH_V16QI_FTYPE_V16QI_UQI),
+ LSX_BUILTIN (vbsll_v, LARCH_V16QI_FTYPE_V16QI_UQI),
+ LSX_BUILTIN (vextrins_b, LARCH_V16QI_FTYPE_V16QI_V16QI_USI),
+ LSX_BUILTIN (vextrins_h, LARCH_V8HI_FTYPE_V8HI_V8HI_USI),
+ LSX_BUILTIN (vextrins_w, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+ LSX_BUILTIN (vextrins_d, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+ LSX_BUILTIN (vmskltz_b, LARCH_V16QI_FTYPE_V16QI),
+ LSX_BUILTIN (vmskltz_h, LARCH_V8HI_FTYPE_V8HI),
+ LSX_BUILTIN (vmskltz_w, LARCH_V4SI_FTYPE_V4SI),
+ LSX_BUILTIN (vmskltz_d, LARCH_V2DI_FTYPE_V2DI),
+ LSX_BUILTIN (vsigncov_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vsigncov_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vsigncov_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vsigncov_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vfmadd_s, LARCH_V4SF_FTYPE_V4SF_V4SF_V4SF),
+ LSX_BUILTIN (vfmadd_d, LARCH_V2DF_FTYPE_V2DF_V2DF_V2DF),
+ LSX_BUILTIN (vfmsub_s, LARCH_V4SF_FTYPE_V4SF_V4SF_V4SF),
+ LSX_BUILTIN (vfmsub_d, LARCH_V2DF_FTYPE_V2DF_V2DF_V2DF),
+ LSX_BUILTIN (vfnmadd_s, LARCH_V4SF_FTYPE_V4SF_V4SF_V4SF),
+ LSX_BUILTIN (vfnmadd_d, LARCH_V2DF_FTYPE_V2DF_V2DF_V2DF),
+ LSX_BUILTIN (vfnmsub_s, LARCH_V4SF_FTYPE_V4SF_V4SF_V4SF),
+ LSX_BUILTIN (vfnmsub_d, LARCH_V2DF_FTYPE_V2DF_V2DF_V2DF),
+ LSX_BUILTIN (vftintrne_w_s, LARCH_V4SI_FTYPE_V4SF),
+ LSX_BUILTIN (vftintrne_l_d, LARCH_V2DI_FTYPE_V2DF),
+ LSX_BUILTIN (vftintrp_w_s, LARCH_V4SI_FTYPE_V4SF),
+ LSX_BUILTIN (vftintrp_l_d, LARCH_V2DI_FTYPE_V2DF),
+ LSX_BUILTIN (vftintrm_w_s, LARCH_V4SI_FTYPE_V4SF),
+ LSX_BUILTIN (vftintrm_l_d, LARCH_V2DI_FTYPE_V2DF),
+ LSX_BUILTIN (vftint_w_d, LARCH_V4SI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vffint_s_l, LARCH_V4SF_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vftintrz_w_d, LARCH_V4SI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vftintrp_w_d, LARCH_V4SI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vftintrm_w_d, LARCH_V4SI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vftintrne_w_d, LARCH_V4SI_FTYPE_V2DF_V2DF),
+ LSX_BUILTIN (vftintl_l_s, LARCH_V2DI_FTYPE_V4SF),
+ LSX_BUILTIN (vftinth_l_s, LARCH_V2DI_FTYPE_V4SF),
+ LSX_BUILTIN (vffinth_d_w, LARCH_V2DF_FTYPE_V4SI),
+ LSX_BUILTIN (vffintl_d_w, LARCH_V2DF_FTYPE_V4SI),
+ LSX_BUILTIN (vftintrzl_l_s, LARCH_V2DI_FTYPE_V4SF),
+ LSX_BUILTIN (vftintrzh_l_s, LARCH_V2DI_FTYPE_V4SF),
+ LSX_BUILTIN (vftintrpl_l_s, LARCH_V2DI_FTYPE_V4SF),
+ LSX_BUILTIN (vftintrph_l_s, LARCH_V2DI_FTYPE_V4SF),
+ LSX_BUILTIN (vftintrml_l_s, LARCH_V2DI_FTYPE_V4SF),
+ LSX_BUILTIN (vftintrmh_l_s, LARCH_V2DI_FTYPE_V4SF),
+ LSX_BUILTIN (vftintrnel_l_s, LARCH_V2DI_FTYPE_V4SF),
+ LSX_BUILTIN (vftintrneh_l_s, LARCH_V2DI_FTYPE_V4SF),
+ LSX_BUILTIN (vfrintrne_s, LARCH_V4SF_FTYPE_V4SF),
+ LSX_BUILTIN (vfrintrne_d, LARCH_V2DF_FTYPE_V2DF),
+ LSX_BUILTIN (vfrintrz_s, LARCH_V4SF_FTYPE_V4SF),
+ LSX_BUILTIN (vfrintrz_d, LARCH_V2DF_FTYPE_V2DF),
+ LSX_BUILTIN (vfrintrp_s, LARCH_V4SF_FTYPE_V4SF),
+ LSX_BUILTIN (vfrintrp_d, LARCH_V2DF_FTYPE_V2DF),
+ LSX_BUILTIN (vfrintrm_s, LARCH_V4SF_FTYPE_V4SF),
+ LSX_BUILTIN (vfrintrm_d, LARCH_V2DF_FTYPE_V2DF),
+ LSX_NO_TARGET_BUILTIN (vstelm_b, LARCH_VOID_FTYPE_V16QI_CVPOINTER_SI_UQI),
+ LSX_NO_TARGET_BUILTIN (vstelm_h, LARCH_VOID_FTYPE_V8HI_CVPOINTER_SI_UQI),
+ LSX_NO_TARGET_BUILTIN (vstelm_w, LARCH_VOID_FTYPE_V4SI_CVPOINTER_SI_UQI),
+ LSX_NO_TARGET_BUILTIN (vstelm_d, LARCH_VOID_FTYPE_V2DI_CVPOINTER_SI_UQI),
+ LSX_BUILTIN (vaddwev_d_w, LARCH_V2DI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vaddwev_w_h, LARCH_V4SI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vaddwev_h_b, LARCH_V8HI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vaddwod_d_w, LARCH_V2DI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vaddwod_w_h, LARCH_V4SI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vaddwod_h_b, LARCH_V8HI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vaddwev_d_wu, LARCH_V2DI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vaddwev_w_hu, LARCH_V4SI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vaddwev_h_bu, LARCH_V8HI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vaddwod_d_wu, LARCH_V2DI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vaddwod_w_hu, LARCH_V4SI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vaddwod_h_bu, LARCH_V8HI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vaddwev_d_wu_w, LARCH_V2DI_FTYPE_UV4SI_V4SI),
+ LSX_BUILTIN (vaddwev_w_hu_h, LARCH_V4SI_FTYPE_UV8HI_V8HI),
+ LSX_BUILTIN (vaddwev_h_bu_b, LARCH_V8HI_FTYPE_UV16QI_V16QI),
+ LSX_BUILTIN (vaddwod_d_wu_w, LARCH_V2DI_FTYPE_UV4SI_V4SI),
+ LSX_BUILTIN (vaddwod_w_hu_h, LARCH_V4SI_FTYPE_UV8HI_V8HI),
+ LSX_BUILTIN (vaddwod_h_bu_b, LARCH_V8HI_FTYPE_UV16QI_V16QI),
+ LSX_BUILTIN (vsubwev_d_w, LARCH_V2DI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vsubwev_w_h, LARCH_V4SI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vsubwev_h_b, LARCH_V8HI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vsubwod_d_w, LARCH_V2DI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vsubwod_w_h, LARCH_V4SI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vsubwod_h_b, LARCH_V8HI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vsubwev_d_wu, LARCH_V2DI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vsubwev_w_hu, LARCH_V4SI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vsubwev_h_bu, LARCH_V8HI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vsubwod_d_wu, LARCH_V2DI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vsubwod_w_hu, LARCH_V4SI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vsubwod_h_bu, LARCH_V8HI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vaddwev_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vaddwod_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vaddwev_q_du, LARCH_V2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vaddwod_q_du, LARCH_V2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vsubwev_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vsubwod_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vsubwev_q_du, LARCH_V2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vsubwod_q_du, LARCH_V2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vaddwev_q_du_d, LARCH_V2DI_FTYPE_UV2DI_V2DI),
+ LSX_BUILTIN (vaddwod_q_du_d, LARCH_V2DI_FTYPE_UV2DI_V2DI),
+
+ LSX_BUILTIN (vmulwev_d_w, LARCH_V2DI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vmulwev_w_h, LARCH_V4SI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vmulwev_h_b, LARCH_V8HI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vmulwod_d_w, LARCH_V2DI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vmulwod_w_h, LARCH_V4SI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vmulwod_h_b, LARCH_V8HI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vmulwev_d_wu, LARCH_V2DI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vmulwev_w_hu, LARCH_V4SI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vmulwev_h_bu, LARCH_V8HI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vmulwod_d_wu, LARCH_V2DI_FTYPE_UV4SI_UV4SI),
+ LSX_BUILTIN (vmulwod_w_hu, LARCH_V4SI_FTYPE_UV8HI_UV8HI),
+ LSX_BUILTIN (vmulwod_h_bu, LARCH_V8HI_FTYPE_UV16QI_UV16QI),
+ LSX_BUILTIN (vmulwev_d_wu_w, LARCH_V2DI_FTYPE_UV4SI_V4SI),
+ LSX_BUILTIN (vmulwev_w_hu_h, LARCH_V4SI_FTYPE_UV8HI_V8HI),
+ LSX_BUILTIN (vmulwev_h_bu_b, LARCH_V8HI_FTYPE_UV16QI_V16QI),
+ LSX_BUILTIN (vmulwod_d_wu_w, LARCH_V2DI_FTYPE_UV4SI_V4SI),
+ LSX_BUILTIN (vmulwod_w_hu_h, LARCH_V4SI_FTYPE_UV8HI_V8HI),
+ LSX_BUILTIN (vmulwod_h_bu_b, LARCH_V8HI_FTYPE_UV16QI_V16QI),
+ LSX_BUILTIN (vmulwev_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vmulwod_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vmulwev_q_du, LARCH_V2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vmulwod_q_du, LARCH_V2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vmulwev_q_du_d, LARCH_V2DI_FTYPE_UV2DI_V2DI),
+ LSX_BUILTIN (vmulwod_q_du_d, LARCH_V2DI_FTYPE_UV2DI_V2DI),
+ LSX_BUILTIN (vhaddw_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vhaddw_qu_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vhsubw_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vhsubw_qu_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI),
+ LSX_BUILTIN (vmaddwev_d_w, LARCH_V2DI_FTYPE_V2DI_V4SI_V4SI),
+ LSX_BUILTIN (vmaddwev_w_h, LARCH_V4SI_FTYPE_V4SI_V8HI_V8HI),
+ LSX_BUILTIN (vmaddwev_h_b, LARCH_V8HI_FTYPE_V8HI_V16QI_V16QI),
+ LSX_BUILTIN (vmaddwev_d_wu, LARCH_UV2DI_FTYPE_UV2DI_UV4SI_UV4SI),
+ LSX_BUILTIN (vmaddwev_w_hu, LARCH_UV4SI_FTYPE_UV4SI_UV8HI_UV8HI),
+ LSX_BUILTIN (vmaddwev_h_bu, LARCH_UV8HI_FTYPE_UV8HI_UV16QI_UV16QI),
+ LSX_BUILTIN (vmaddwod_d_w, LARCH_V2DI_FTYPE_V2DI_V4SI_V4SI),
+ LSX_BUILTIN (vmaddwod_w_h, LARCH_V4SI_FTYPE_V4SI_V8HI_V8HI),
+ LSX_BUILTIN (vmaddwod_h_b, LARCH_V8HI_FTYPE_V8HI_V16QI_V16QI),
+ LSX_BUILTIN (vmaddwod_d_wu, LARCH_UV2DI_FTYPE_UV2DI_UV4SI_UV4SI),
+ LSX_BUILTIN (vmaddwod_w_hu, LARCH_UV4SI_FTYPE_UV4SI_UV8HI_UV8HI),
+ LSX_BUILTIN (vmaddwod_h_bu, LARCH_UV8HI_FTYPE_UV8HI_UV16QI_UV16QI),
+ LSX_BUILTIN (vmaddwev_d_wu_w, LARCH_V2DI_FTYPE_V2DI_UV4SI_V4SI),
+ LSX_BUILTIN (vmaddwev_w_hu_h, LARCH_V4SI_FTYPE_V4SI_UV8HI_V8HI),
+ LSX_BUILTIN (vmaddwev_h_bu_b, LARCH_V8HI_FTYPE_V8HI_UV16QI_V16QI),
+ LSX_BUILTIN (vmaddwod_d_wu_w, LARCH_V2DI_FTYPE_V2DI_UV4SI_V4SI),
+ LSX_BUILTIN (vmaddwod_w_hu_h, LARCH_V4SI_FTYPE_V4SI_UV8HI_V8HI),
+ LSX_BUILTIN (vmaddwod_h_bu_b, LARCH_V8HI_FTYPE_V8HI_UV16QI_V16QI),
+ LSX_BUILTIN (vmaddwev_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI_V2DI),
+ LSX_BUILTIN (vmaddwod_q_d, LARCH_V2DI_FTYPE_V2DI_V2DI_V2DI),
+ LSX_BUILTIN (vmaddwev_q_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI_UV2DI),
+ LSX_BUILTIN (vmaddwod_q_du, LARCH_UV2DI_FTYPE_UV2DI_UV2DI_UV2DI),
+ LSX_BUILTIN (vmaddwev_q_du_d, LARCH_V2DI_FTYPE_V2DI_UV2DI_V2DI),
+ LSX_BUILTIN (vmaddwod_q_du_d, LARCH_V2DI_FTYPE_V2DI_UV2DI_V2DI),
+ LSX_BUILTIN (vrotr_b, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vrotr_h, LARCH_V8HI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vrotr_w, LARCH_V4SI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vrotr_d, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vadd_q, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vsub_q, LARCH_V2DI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vldrepl_b, LARCH_V16QI_FTYPE_CVPOINTER_SI),
+ LSX_BUILTIN (vldrepl_h, LARCH_V8HI_FTYPE_CVPOINTER_SI),
+ LSX_BUILTIN (vldrepl_w, LARCH_V4SI_FTYPE_CVPOINTER_SI),
+ LSX_BUILTIN (vldrepl_d, LARCH_V2DI_FTYPE_CVPOINTER_SI),
+
+ LSX_BUILTIN (vmskgez_b, LARCH_V16QI_FTYPE_V16QI),
+ LSX_BUILTIN (vmsknz_b, LARCH_V16QI_FTYPE_V16QI),
+ LSX_BUILTIN (vexth_h_b, LARCH_V8HI_FTYPE_V16QI),
+ LSX_BUILTIN (vexth_w_h, LARCH_V4SI_FTYPE_V8HI),
+ LSX_BUILTIN (vexth_d_w, LARCH_V2DI_FTYPE_V4SI),
+ LSX_BUILTIN (vexth_q_d, LARCH_V2DI_FTYPE_V2DI),
+ LSX_BUILTIN (vexth_hu_bu, LARCH_UV8HI_FTYPE_UV16QI),
+ LSX_BUILTIN (vexth_wu_hu, LARCH_UV4SI_FTYPE_UV8HI),
+ LSX_BUILTIN (vexth_du_wu, LARCH_UV2DI_FTYPE_UV4SI),
+ LSX_BUILTIN (vexth_qu_du, LARCH_UV2DI_FTYPE_UV2DI),
+ LSX_BUILTIN (vrotri_b, LARCH_V16QI_FTYPE_V16QI_UQI),
+ LSX_BUILTIN (vrotri_h, LARCH_V8HI_FTYPE_V8HI_UQI),
+ LSX_BUILTIN (vrotri_w, LARCH_V4SI_FTYPE_V4SI_UQI),
+ LSX_BUILTIN (vrotri_d, LARCH_V2DI_FTYPE_V2DI_UQI),
+ LSX_BUILTIN (vextl_q_d, LARCH_V2DI_FTYPE_V2DI),
+ LSX_BUILTIN (vsrlni_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI),
+ LSX_BUILTIN (vsrlni_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI),
+ LSX_BUILTIN (vsrlni_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+ LSX_BUILTIN (vsrlni_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+ LSX_BUILTIN (vsrlrni_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI),
+ LSX_BUILTIN (vsrlrni_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI),
+ LSX_BUILTIN (vsrlrni_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+ LSX_BUILTIN (vsrlrni_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+ LSX_BUILTIN (vssrlni_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI),
+ LSX_BUILTIN (vssrlni_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI),
+ LSX_BUILTIN (vssrlni_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+ LSX_BUILTIN (vssrlni_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+ LSX_BUILTIN (vssrlni_bu_h, LARCH_UV16QI_FTYPE_UV16QI_V16QI_USI),
+ LSX_BUILTIN (vssrlni_hu_w, LARCH_UV8HI_FTYPE_UV8HI_V8HI_USI),
+ LSX_BUILTIN (vssrlni_wu_d, LARCH_UV4SI_FTYPE_UV4SI_V4SI_USI),
+ LSX_BUILTIN (vssrlni_du_q, LARCH_UV2DI_FTYPE_UV2DI_V2DI_USI),
+ LSX_BUILTIN (vssrlrni_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI),
+ LSX_BUILTIN (vssrlrni_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI),
+ LSX_BUILTIN (vssrlrni_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+ LSX_BUILTIN (vssrlrni_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+ LSX_BUILTIN (vssrlrni_bu_h, LARCH_UV16QI_FTYPE_UV16QI_V16QI_USI),
+ LSX_BUILTIN (vssrlrni_hu_w, LARCH_UV8HI_FTYPE_UV8HI_V8HI_USI),
+ LSX_BUILTIN (vssrlrni_wu_d, LARCH_UV4SI_FTYPE_UV4SI_V4SI_USI),
+ LSX_BUILTIN (vssrlrni_du_q, LARCH_UV2DI_FTYPE_UV2DI_V2DI_USI),
+ LSX_BUILTIN (vsrani_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI),
+ LSX_BUILTIN (vsrani_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI),
+ LSX_BUILTIN (vsrani_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+ LSX_BUILTIN (vsrani_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+ LSX_BUILTIN (vsrarni_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI),
+ LSX_BUILTIN (vsrarni_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI),
+ LSX_BUILTIN (vsrarni_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+ LSX_BUILTIN (vsrarni_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+ LSX_BUILTIN (vssrani_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI),
+ LSX_BUILTIN (vssrani_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI),
+ LSX_BUILTIN (vssrani_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+ LSX_BUILTIN (vssrani_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+ LSX_BUILTIN (vssrani_bu_h, LARCH_UV16QI_FTYPE_UV16QI_V16QI_USI),
+ LSX_BUILTIN (vssrani_hu_w, LARCH_UV8HI_FTYPE_UV8HI_V8HI_USI),
+ LSX_BUILTIN (vssrani_wu_d, LARCH_UV4SI_FTYPE_UV4SI_V4SI_USI),
+ LSX_BUILTIN (vssrani_du_q, LARCH_UV2DI_FTYPE_UV2DI_V2DI_USI),
+ LSX_BUILTIN (vssrarni_b_h, LARCH_V16QI_FTYPE_V16QI_V16QI_USI),
+ LSX_BUILTIN (vssrarni_h_w, LARCH_V8HI_FTYPE_V8HI_V8HI_USI),
+ LSX_BUILTIN (vssrarni_w_d, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+ LSX_BUILTIN (vssrarni_d_q, LARCH_V2DI_FTYPE_V2DI_V2DI_USI),
+ LSX_BUILTIN (vssrarni_bu_h, LARCH_UV16QI_FTYPE_UV16QI_V16QI_USI),
+ LSX_BUILTIN (vssrarni_hu_w, LARCH_UV8HI_FTYPE_UV8HI_V8HI_USI),
+ LSX_BUILTIN (vssrarni_wu_d, LARCH_UV4SI_FTYPE_UV4SI_V4SI_USI),
+ LSX_BUILTIN (vssrarni_du_q, LARCH_UV2DI_FTYPE_UV2DI_V2DI_USI),
+ LSX_BUILTIN (vpermi_w, LARCH_V4SI_FTYPE_V4SI_V4SI_USI),
+ LSX_BUILTIN (vld, LARCH_V16QI_FTYPE_CVPOINTER_SI),
+ LSX_NO_TARGET_BUILTIN (vst, LARCH_VOID_FTYPE_V16QI_CVPOINTER_SI),
+ LSX_BUILTIN (vssrlrn_b_h, LARCH_V16QI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vssrlrn_h_w, LARCH_V8HI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vssrlrn_w_d, LARCH_V4SI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vssrln_b_h, LARCH_V16QI_FTYPE_V8HI_V8HI),
+ LSX_BUILTIN (vssrln_h_w, LARCH_V8HI_FTYPE_V4SI_V4SI),
+ LSX_BUILTIN (vssrln_w_d, LARCH_V4SI_FTYPE_V2DI_V2DI),
+ LSX_BUILTIN (vorn_v, LARCH_V16QI_FTYPE_V16QI_V16QI),
+ LSX_BUILTIN (vldi, LARCH_V2DI_FTYPE_HI),
+ LSX_BUILTIN (vshuf_b, LARCH_V16QI_FTYPE_V16QI_V16QI_V16QI),
+ LSX_BUILTIN (vldx, LARCH_V16QI_FTYPE_CVPOINTER_DI),
+ LSX_NO_TARGET_BUILTIN (vstx, LARCH_VOID_FTYPE_V16QI_CVPOINTER_DI),
+ LSX_BUILTIN (vextl_qu_du, LARCH_UV2DI_FTYPE_UV2DI)
};
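Each entry above pairs a builtin name with a LARCH_*_FTYPE_* signature; following the usual convention for this table, every LSX_BUILTIN entry is expected to surface as a __builtin_lsx_<name> function once LSX is enabled. A minimal usage sketch under that assumption, using GCC's generic vector types:

typedef signed char v16i8 __attribute__ ((vector_size (16)));

/* LARCH_V16QI_FTYPE_V16QI_V16QI: two V16QI operands, one V16QI result.  */
v16i8
saturating_add_bytes (v16i8 a, v16i8 b)
{
  return __builtin_lsx_vsadd_b (a, b);
}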
/* Index I is the function declaration for loongarch_builtins[I], or null if
@@ -193,11 +1219,46 @@ static GTY (()) tree loongarch_builtin_decls[ARRAY_SIZE (loongarch_builtins)];
using the instruction code or return null if not defined for the target. */
static GTY (()) int loongarch_get_builtin_decl_index[NUM_INSN_CODES];
+
+/* MODE is a vector mode whose elements have type TYPE. Return the type
+ of the vector itself. */
+
+static tree
+loongarch_builtin_vector_type (tree type, machine_mode mode)
+{
+ static tree types[2 * (int) MAX_MACHINE_MODE];
+ int mode_index;
+
+ mode_index = (int) mode;
+
+ if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type))
+ mode_index += MAX_MACHINE_MODE;
+
+ if (types[mode_index] == NULL_TREE)
+ types[mode_index] = build_vector_type_for_mode (type, mode);
+ return types[mode_index];
+}
+
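The cache above keeps one slot per machine mode for signed element types plus a second bank, offset by MAX_MACHINE_MODE, for unsigned element types, so signed and unsigned lane types of the same vector mode get distinct, reusable tree nodes. A small sketch of the resulting behaviour (illustration only):

/* Two lookups for V4SImode land in different cache slots.  */
tree t_s = loongarch_builtin_vector_type (intSI_type_node, V4SImode);
tree t_u = loongarch_builtin_vector_type (unsigned_intSI_type_node, V4SImode);
/* t_s != t_u, and repeating either call returns the same cached node.  */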
+/* Return a type for 'const volatile void *'. */
+
+static tree
+loongarch_build_cvpointer_type (void)
+{
+ static tree cache;
+
+ if (cache == NULL_TREE)
+ cache = build_pointer_type (build_qualified_type (void_type_node,
+ TYPE_QUAL_CONST
+ | TYPE_QUAL_VOLATILE));
+ return cache;
+}
+
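The const volatile void * type built above is used for the pointer arguments of the memory builtins, so they accept a pointer to any object, const-qualified or not, without a cast. A small usage sketch, assuming the __builtin_lsx_<name> naming convention used throughout this patch:

typedef signed char v16i8 __attribute__ ((vector_size (16)));

v16i8
load_vector (const signed char *p)
{
  /* LARCH_V16QI_FTYPE_CVPOINTER_SI: base pointer plus a constant offset.  */
  return __builtin_lsx_vld (p, 0);
}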
/* Source-level argument types. */
#define LARCH_ATYPE_VOID void_type_node
#define LARCH_ATYPE_INT integer_type_node
#define LARCH_ATYPE_POINTER ptr_type_node
-
+#define LARCH_ATYPE_CVPOINTER loongarch_build_cvpointer_type ()
+#define LARCH_ATYPE_BOOLEAN boolean_type_node
/* Standard mode-based argument types. */
#define LARCH_ATYPE_QI intQI_type_node
#define LARCH_ATYPE_UQI unsigned_intQI_type_node
@@ -210,6 +1271,72 @@ static GTY (()) int loongarch_get_builtin_decl_index[NUM_INSN_CODES];
#define LARCH_ATYPE_SF float_type_node
#define LARCH_ATYPE_DF double_type_node
+/* Vector argument types. */
+#define LARCH_ATYPE_V2SF \
+ loongarch_builtin_vector_type (float_type_node, V2SFmode)
+#define LARCH_ATYPE_V2HI \
+ loongarch_builtin_vector_type (intHI_type_node, V2HImode)
+#define LARCH_ATYPE_V2SI \
+ loongarch_builtin_vector_type (intSI_type_node, V2SImode)
+#define LARCH_ATYPE_V4QI \
+ loongarch_builtin_vector_type (intQI_type_node, V4QImode)
+#define LARCH_ATYPE_V4HI \
+ loongarch_builtin_vector_type (intHI_type_node, V4HImode)
+#define LARCH_ATYPE_V8QI \
+ loongarch_builtin_vector_type (intQI_type_node, V8QImode)
+
+#define LARCH_ATYPE_V2DI \
+ loongarch_builtin_vector_type (long_long_integer_type_node, V2DImode)
+#define LARCH_ATYPE_V4SI \
+ loongarch_builtin_vector_type (intSI_type_node, V4SImode)
+#define LARCH_ATYPE_V8HI \
+ loongarch_builtin_vector_type (intHI_type_node, V8HImode)
+#define LARCH_ATYPE_V16QI \
+ loongarch_builtin_vector_type (intQI_type_node, V16QImode)
+#define LARCH_ATYPE_V2DF \
+ loongarch_builtin_vector_type (double_type_node, V2DFmode)
+#define LARCH_ATYPE_V4SF \
+ loongarch_builtin_vector_type (float_type_node, V4SFmode)
+
+/* LoongArch ASX. */
+#define LARCH_ATYPE_V4DI \
+ loongarch_builtin_vector_type (long_long_integer_type_node, V4DImode)
+#define LARCH_ATYPE_V8SI \
+ loongarch_builtin_vector_type (intSI_type_node, V8SImode)
+#define LARCH_ATYPE_V16HI \
+ loongarch_builtin_vector_type (intHI_type_node, V16HImode)
+#define LARCH_ATYPE_V32QI \
+ loongarch_builtin_vector_type (intQI_type_node, V32QImode)
+#define LARCH_ATYPE_V4DF \
+ loongarch_builtin_vector_type (double_type_node, V4DFmode)
+#define LARCH_ATYPE_V8SF \
+ loongarch_builtin_vector_type (float_type_node, V8SFmode)
+
+#define LARCH_ATYPE_UV2DI \
+ loongarch_builtin_vector_type (long_long_unsigned_type_node, V2DImode)
+#define LARCH_ATYPE_UV4SI \
+ loongarch_builtin_vector_type (unsigned_intSI_type_node, V4SImode)
+#define LARCH_ATYPE_UV8HI \
+ loongarch_builtin_vector_type (unsigned_intHI_type_node, V8HImode)
+#define LARCH_ATYPE_UV16QI \
+ loongarch_builtin_vector_type (unsigned_intQI_type_node, V16QImode)
+
+#define LARCH_ATYPE_UV4DI \
+ loongarch_builtin_vector_type (long_long_unsigned_type_node, V4DImode)
+#define LARCH_ATYPE_UV8SI \
+ loongarch_builtin_vector_type (unsigned_intSI_type_node, V8SImode)
+#define LARCH_ATYPE_UV16HI \
+ loongarch_builtin_vector_type (unsigned_intHI_type_node, V16HImode)
+#define LARCH_ATYPE_UV32QI \
+ loongarch_builtin_vector_type (unsigned_intQI_type_node, V32QImode)
+
+#define LARCH_ATYPE_UV2SI \
+ loongarch_builtin_vector_type (unsigned_intSI_type_node, V2SImode)
+#define LARCH_ATYPE_UV4HI \
+ loongarch_builtin_vector_type (unsigned_intHI_type_node, V4HImode)
+#define LARCH_ATYPE_UV8QI \
+ loongarch_builtin_vector_type (unsigned_intQI_type_node, V8QImode)
+
/* LARCH_FTYPE_ATYPESN takes N LARCH_FTYPES-like type codes and lists
their associated LARCH_ATYPEs. */
#define LARCH_FTYPE_ATYPES1(A, B) LARCH_ATYPE_##A, LARCH_ATYPE_##B
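Putting the pieces together: a signature token such as LARCH_V4SI_FTYPE_V4SI_V4SI connects a DEF_LARCH_FTYPE entry in loongarch-ftypes.def with the LARCH_ATYPE_* macros above. Expanded by hand it corresponds, roughly, to the following (a sketch, not the literal macro expansion):

/* DEF_LARCH_FTYPE (2, (V4SI, V4SI, V4SI)) plus LARCH_ATYPE_V4SI yields a
   function type equivalent to:  */
tree v4si = loongarch_builtin_vector_type (intSI_type_node, V4SImode);
tree fntype = build_function_type_list (v4si, v4si, v4si, NULL_TREE);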
@@ -288,6 +1415,92 @@ loongarch_builtin_decl (unsigned int code, bool initialize_p ATTRIBUTE_UNUSED)
return loongarch_builtin_decls[code];
}
+/* Implement TARGET_VECTORIZE_BUILTIN_VECTORIZED_FUNCTION. */
+
+tree
+loongarch_builtin_vectorized_function (unsigned int fn, tree type_out,
+ tree type_in)
+{
+ machine_mode in_mode, out_mode;
+ int in_n, out_n;
+
+ if (TREE_CODE (type_out) != VECTOR_TYPE
+ || TREE_CODE (type_in) != VECTOR_TYPE
+ || !ISA_HAS_LSX)
+ return NULL_TREE;
+
+ out_mode = TYPE_MODE (TREE_TYPE (type_out));
+ out_n = TYPE_VECTOR_SUBPARTS (type_out);
+ in_mode = TYPE_MODE (TREE_TYPE (type_in));
+ in_n = TYPE_VECTOR_SUBPARTS (type_in);
+
+ /* INSN is the name of the associated instruction pattern, without
+ the leading CODE_FOR_. */
+#define LARCH_GET_BUILTIN(INSN) \
+ loongarch_builtin_decls[loongarch_get_builtin_decl_index[CODE_FOR_##INSN]]
+
+ switch (fn)
+ {
+ CASE_CFN_CEIL:
+ if (out_mode == DFmode && in_mode == DFmode)
+ {
+ if (out_n == 2 && in_n == 2)
+ return LARCH_GET_BUILTIN (lsx_vfrintrp_d);
+ }
+ if (out_mode == SFmode && in_mode == SFmode)
+ {
+ if (out_n == 4 && in_n == 4)
+ return LARCH_GET_BUILTIN (lsx_vfrintrp_s);
+ }
+ break;
+
+ CASE_CFN_TRUNC:
+ if (out_mode == DFmode && in_mode == DFmode)
+ {
+ if (out_n == 2 && in_n == 2)
+ return LARCH_GET_BUILTIN (lsx_vfrintrz_d);
+ }
+ if (out_mode == SFmode && in_mode == SFmode)
+ {
+ if (out_n == 4 && in_n == 4)
+ return LARCH_GET_BUILTIN (lsx_vfrintrz_s);
+ }
+ break;
+
+ CASE_CFN_RINT:
+ CASE_CFN_ROUND:
+ if (out_mode == DFmode && in_mode == DFmode)
+ {
+ if (out_n == 2 && in_n == 2)
+ return LARCH_GET_BUILTIN (lsx_vfrint_d);
+ }
+ if (out_mode == SFmode && in_mode == SFmode)
+ {
+ if (out_n == 4 && in_n == 4)
+ return LARCH_GET_BUILTIN (lsx_vfrint_s);
+ }
+ break;
+
+ CASE_CFN_FLOOR:
+ if (out_mode == DFmode && in_mode == DFmode)
+ {
+ if (out_n == 2 && in_n == 2)
+ return LARCH_GET_BUILTIN (lsx_vfrintrm_d);
+ }
+ if (out_mode == SFmode && in_mode == SFmode)
+ {
+ if (out_n == 4 && in_n == 4)
+ return LARCH_GET_BUILTIN (lsx_vfrintrm_s);
+ }
+ break;
+
+ default:
+ break;
+ }
+
+ return NULL_TREE;
+}
+
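The hook above lets the tree vectorizer replace calls to the standard rounding functions with the LSX vfrint variants. A usage sketch, assuming LSX is enabled and the math/vectorizer flags permit the transformation:

/* A loop like this is a candidate for mapping floorf over four floats to
   the vfrintrm.s pattern selected by CASE_CFN_FLOOR above.  */
void
floor_array (float *restrict dst, const float *restrict src, int n)
{
  for (int i = 0; i < n; i++)
    dst[i] = __builtin_floorf (src[i]);
}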
/* Take argument ARGNO from EXP's argument list and convert it into
an expand operand. Store the operand in *OP. */
@@ -323,7 +1536,236 @@ static rtx
loongarch_expand_builtin_insn (enum insn_code icode, unsigned int nops,
struct expand_operand *ops, bool has_target_p)
{
- if (!maybe_expand_insn (icode, nops, ops))
+ machine_mode imode;
+ int rangelo = 0, rangehi = 0, error_opno = 0;
+
+ switch (icode)
+ {
+ case CODE_FOR_lsx_vaddi_bu:
+ case CODE_FOR_lsx_vaddi_hu:
+ case CODE_FOR_lsx_vaddi_wu:
+ case CODE_FOR_lsx_vaddi_du:
+ case CODE_FOR_lsx_vslti_bu:
+ case CODE_FOR_lsx_vslti_hu:
+ case CODE_FOR_lsx_vslti_wu:
+ case CODE_FOR_lsx_vslti_du:
+ case CODE_FOR_lsx_vslei_bu:
+ case CODE_FOR_lsx_vslei_hu:
+ case CODE_FOR_lsx_vslei_wu:
+ case CODE_FOR_lsx_vslei_du:
+ case CODE_FOR_lsx_vmaxi_bu:
+ case CODE_FOR_lsx_vmaxi_hu:
+ case CODE_FOR_lsx_vmaxi_wu:
+ case CODE_FOR_lsx_vmaxi_du:
+ case CODE_FOR_lsx_vmini_bu:
+ case CODE_FOR_lsx_vmini_hu:
+ case CODE_FOR_lsx_vmini_wu:
+ case CODE_FOR_lsx_vmini_du:
+ case CODE_FOR_lsx_vsubi_bu:
+ case CODE_FOR_lsx_vsubi_hu:
+ case CODE_FOR_lsx_vsubi_wu:
+ case CODE_FOR_lsx_vsubi_du:
+ gcc_assert (has_target_p && nops == 3);
+ /* We only generate a vector of constants iff the second argument
+ is an immediate. We also validate the range of the immediate. */
+ if (CONST_INT_P (ops[2].value))
+ {
+ rangelo = 0;
+ rangehi = 31;
+ if (IN_RANGE (INTVAL (ops[2].value), rangelo, rangehi))
+ {
+ ops[2].mode = ops[0].mode;
+ ops[2].value = loongarch_gen_const_int_vector (ops[2].mode,
+ INTVAL (ops[2].value));
+ }
+ else
+ error_opno = 2;
+ }
+ break;
+
+ case CODE_FOR_lsx_vseqi_b:
+ case CODE_FOR_lsx_vseqi_h:
+ case CODE_FOR_lsx_vseqi_w:
+ case CODE_FOR_lsx_vseqi_d:
+ case CODE_FOR_lsx_vslti_b:
+ case CODE_FOR_lsx_vslti_h:
+ case CODE_FOR_lsx_vslti_w:
+ case CODE_FOR_lsx_vslti_d:
+ case CODE_FOR_lsx_vslei_b:
+ case CODE_FOR_lsx_vslei_h:
+ case CODE_FOR_lsx_vslei_w:
+ case CODE_FOR_lsx_vslei_d:
+ case CODE_FOR_lsx_vmaxi_b:
+ case CODE_FOR_lsx_vmaxi_h:
+ case CODE_FOR_lsx_vmaxi_w:
+ case CODE_FOR_lsx_vmaxi_d:
+ case CODE_FOR_lsx_vmini_b:
+ case CODE_FOR_lsx_vmini_h:
+ case CODE_FOR_lsx_vmini_w:
+ case CODE_FOR_lsx_vmini_d:
+ gcc_assert (has_target_p && nops == 3);
+ /* We only generate a vector of constants iff the second argument
+ is an immediate. We also validate the range of the immediate. */
+ if (CONST_INT_P (ops[2].value))
+ {
+ rangelo = -16;
+ rangehi = 15;
+ if (IN_RANGE (INTVAL (ops[2].value), rangelo, rangehi))
+ {
+ ops[2].mode = ops[0].mode;
+ ops[2].value = loongarch_gen_const_int_vector (ops[2].mode,
+ INTVAL (ops[2].value));
+ }
+ else
+ error_opno = 2;
+ }
+ break;
+
+ case CODE_FOR_lsx_vandi_b:
+ case CODE_FOR_lsx_vori_b:
+ case CODE_FOR_lsx_vnori_b:
+ case CODE_FOR_lsx_vxori_b:
+ gcc_assert (has_target_p && nops == 3);
+ if (!CONST_INT_P (ops[2].value))
+ break;
+ ops[2].mode = ops[0].mode;
+ ops[2].value = loongarch_gen_const_int_vector (ops[2].mode,
+ INTVAL (ops[2].value));
+ break;
+
+ case CODE_FOR_lsx_vbitseli_b:
+ gcc_assert (has_target_p && nops == 4);
+ if (!CONST_INT_P (ops[3].value))
+ break;
+ ops[3].mode = ops[0].mode;
+ ops[3].value = loongarch_gen_const_int_vector (ops[3].mode,
+ INTVAL (ops[3].value));
+ break;
+
+ case CODE_FOR_lsx_vreplgr2vr_b:
+ case CODE_FOR_lsx_vreplgr2vr_h:
+ case CODE_FOR_lsx_vreplgr2vr_w:
+ case CODE_FOR_lsx_vreplgr2vr_d:
+      /* Map the built-ins to vector fill operations.  We need to fix up
+	 the mode for the element being inserted.  */
+ gcc_assert (has_target_p && nops == 2);
+ imode = GET_MODE_INNER (ops[0].mode);
+ ops[1].value = lowpart_subreg (imode, ops[1].value, ops[1].mode);
+ ops[1].mode = imode;
+ break;
+
+ case CODE_FOR_lsx_vilvh_b:
+ case CODE_FOR_lsx_vilvh_h:
+ case CODE_FOR_lsx_vilvh_w:
+ case CODE_FOR_lsx_vilvh_d:
+ case CODE_FOR_lsx_vilvl_b:
+ case CODE_FOR_lsx_vilvl_h:
+ case CODE_FOR_lsx_vilvl_w:
+ case CODE_FOR_lsx_vilvl_d:
+ case CODE_FOR_lsx_vpackev_b:
+ case CODE_FOR_lsx_vpackev_h:
+ case CODE_FOR_lsx_vpackev_w:
+ case CODE_FOR_lsx_vpackod_b:
+ case CODE_FOR_lsx_vpackod_h:
+ case CODE_FOR_lsx_vpackod_w:
+ case CODE_FOR_lsx_vpickev_b:
+ case CODE_FOR_lsx_vpickev_h:
+ case CODE_FOR_lsx_vpickev_w:
+ case CODE_FOR_lsx_vpickod_b:
+ case CODE_FOR_lsx_vpickod_h:
+ case CODE_FOR_lsx_vpickod_w:
+      /* Swap operands 1 and 2 for interleave operations.  The built-ins
+	 follow the ISA convention, with op1 as the higher component and
+	 op2 as the lower component.  However, the VEC_PERM op in trees and
+	 vec_concat in RTL expect the first operand to be the lower
+	 component, so the swap is needed for the built-ins.  */
+ gcc_assert (has_target_p && nops == 3);
+ std::swap (ops[1], ops[2]);
+ break;
+
+ case CODE_FOR_lsx_vslli_b:
+ case CODE_FOR_lsx_vslli_h:
+ case CODE_FOR_lsx_vslli_w:
+ case CODE_FOR_lsx_vslli_d:
+ case CODE_FOR_lsx_vsrai_b:
+ case CODE_FOR_lsx_vsrai_h:
+ case CODE_FOR_lsx_vsrai_w:
+ case CODE_FOR_lsx_vsrai_d:
+ case CODE_FOR_lsx_vsrli_b:
+ case CODE_FOR_lsx_vsrli_h:
+ case CODE_FOR_lsx_vsrli_w:
+ case CODE_FOR_lsx_vsrli_d:
+ gcc_assert (has_target_p && nops == 3);
+ if (CONST_INT_P (ops[2].value))
+ {
+ rangelo = 0;
+ rangehi = GET_MODE_UNIT_BITSIZE (ops[0].mode) - 1;
+ if (IN_RANGE (INTVAL (ops[2].value), rangelo, rangehi))
+ {
+ ops[2].mode = ops[0].mode;
+ ops[2].value = loongarch_gen_const_int_vector (ops[2].mode,
+ INTVAL (ops[2].value));
+ }
+ else
+ error_opno = 2;
+ }
+ break;
+
+ case CODE_FOR_lsx_vinsgr2vr_b:
+ case CODE_FOR_lsx_vinsgr2vr_h:
+ case CODE_FOR_lsx_vinsgr2vr_w:
+ case CODE_FOR_lsx_vinsgr2vr_d:
+ /* Map the built-ins to insert operations. We need to swap operands,
+ fix up the mode for the element being inserted, and generate
+ a bit mask for vec_merge. */
+ gcc_assert (has_target_p && nops == 4);
+ std::swap (ops[1], ops[2]);
+ imode = GET_MODE_INNER (ops[0].mode);
+ ops[1].value = lowpart_subreg (imode, ops[1].value, ops[1].mode);
+ ops[1].mode = imode;
+ rangelo = 0;
+ rangehi = GET_MODE_NUNITS (ops[0].mode) - 1;
+ if (CONST_INT_P (ops[3].value)
+ && IN_RANGE (INTVAL (ops[3].value), rangelo, rangehi))
+ ops[3].value = GEN_INT (1 << INTVAL (ops[3].value));
+ else
+ error_opno = 2;
+ break;
+
+ /* Map the built-ins to element insert operations. We need to swap
+ operands and generate a bit mask. */
+ gcc_assert (has_target_p && nops == 4);
+ std::swap (ops[1], ops[2]);
+ std::swap (ops[1], ops[3]);
+ rangelo = 0;
+ rangehi = GET_MODE_NUNITS (ops[0].mode) - 1;
+ if (CONST_INT_P (ops[3].value)
+ && IN_RANGE (INTVAL (ops[3].value), rangelo, rangehi))
+ ops[3].value = GEN_INT (1 << INTVAL (ops[3].value));
+ else
+ error_opno = 2;
+ break;
+
+ case CODE_FOR_lsx_vshuf4i_b:
+ case CODE_FOR_lsx_vshuf4i_h:
+ case CODE_FOR_lsx_vshuf4i_w:
+ case CODE_FOR_lsx_vshuf4i_w_f:
+ gcc_assert (has_target_p && nops == 3);
+ ops[2].value = loongarch_gen_const_int_vector_shuffle (ops[0].mode,
+ INTVAL (ops[2].value));
+ break;
+
+ default:
+ break;
+ }
+
+ if (error_opno != 0)
+ {
+ error ("argument %d to the built-in must be a constant"
+ " in range %d to %d", error_opno, rangelo, rangehi);
+ return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
+ }
+ else if (!maybe_expand_insn (icode, nops, ops))
{
error ("invalid argument to built-in function");
return has_target_p ? gen_reg_rtx (ops[0].mode) : const0_rtx;
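The range checks above turn an out-of-range immediate into a compile-time diagnostic instead of silently wrapping. A small sketch, assuming the __builtin_lsx_<name> naming convention:

typedef signed char v16i8 __attribute__ ((vector_size (16)));

v16i8
max_with_15 (v16i8 x)
{
  /* The immediate must be a compile-time constant in -16..15; passing 99
     here would trigger the "argument 2 to the built-in must be a constant
     in range -16 to 15" error emitted above.  */
  return __builtin_lsx_vmaxi_b (x, 15);
}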
@@ -357,6 +1799,50 @@ loongarch_expand_builtin_direct (enum insn_code icode, rtx target, tree exp,
return loongarch_expand_builtin_insn (icode, opno, ops, has_target_p);
}
+/* Expand an LSX built-in for a compare and branch instruction specified by
+   ICODE, setting a general-purpose register to 1 if the branch was taken
+   and to 0 otherwise.  */
+
+static rtx
+loongarch_expand_builtin_lsx_test_branch (enum insn_code icode, tree exp)
+{
+ struct expand_operand ops[3];
+ rtx_insn *cbranch;
+ rtx_code_label *true_label, *done_label;
+ rtx cmp_result;
+
+ true_label = gen_label_rtx ();
+ done_label = gen_label_rtx ();
+
+ create_input_operand (&ops[0], true_label, TYPE_MODE (TREE_TYPE (exp)));
+ loongarch_prepare_builtin_arg (&ops[1], exp, 0);
+ create_fixed_operand (&ops[2], const0_rtx);
+
+ /* Make sure that the operand 1 is a REG. */
+ if (GET_CODE (ops[1].value) != REG)
+ ops[1].value = force_reg (ops[1].mode, ops[1].value);
+
+ if ((cbranch = maybe_gen_insn (icode, 3, ops)) == NULL_RTX)
+ error ("failed to expand built-in function");
+
+ cmp_result = gen_reg_rtx (SImode);
+
+ /* First assume that CMP_RESULT is false. */
+ loongarch_emit_move (cmp_result, const0_rtx);
+
+ /* Branch to TRUE_LABEL if CBRANCH is taken and DONE_LABEL otherwise. */
+ emit_jump_insn (cbranch);
+ emit_jump_insn (gen_jump (done_label));
+ emit_barrier ();
+
+ /* Set CMP_RESULT to true if the branch was taken. */
+ emit_label (true_label);
+ loongarch_emit_move (cmp_result, const1_rtx);
+
+ emit_label (done_label);
+ return cmp_result;
+}
+
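At the source level, a builtin routed through this helper simply yields an int that is 1 when the branch condition held and 0 otherwise. A minimal usage sketch (illustrative only, assuming the __lsx_bz_v wrapper that lsxintrin.h defines for the corresponding test-branch builtin; the helper name is not part of the patch):

  #include <lsxintrin.h>

  /* Illustrative helper: returns 1 when every bit of V is zero, using the
     0/1 value materialised by loongarch_expand_builtin_lsx_test_branch.  */
  static inline int
  vector_is_zero (__m128i v)
  {
    return __lsx_bz_v (v);
  }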
/* Implement TARGET_EXPAND_BUILTIN. */
rtx
@@ -377,10 +1863,14 @@ loongarch_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
switch (d->builtin_type)
{
case LARCH_BUILTIN_DIRECT:
+ case LARCH_BUILTIN_LSX:
return loongarch_expand_builtin_direct (d->icode, target, exp, true);
case LARCH_BUILTIN_DIRECT_NO_TARGET:
return loongarch_expand_builtin_direct (d->icode, target, exp, false);
+
+ case LARCH_BUILTIN_LSX_TEST_BRANCH:
+ return loongarch_expand_builtin_lsx_test_branch (d->icode, exp);
}
gcc_unreachable ();
}
diff --git a/gcc/config/loongarch/loongarch-ftypes.def b/gcc/config/loongarch/loongarch-ftypes.def
index 2babff414..2b0d50892 100644
--- a/gcc/config/loongarch/loongarch-ftypes.def
+++ b/gcc/config/loongarch/loongarch-ftypes.def
@@ -32,7 +32,7 @@ along with GCC; see the file COPYING3. If not see
INT for integer_type_node
POINTER for ptr_type_node
- (we don't use PTR because that's a ANSI-compatibillity macro).
+ (we don't use PTR because that's an ANSI-compatibility macro).
Please keep this list lexicographically sorted by the LIST argument. */
@@ -63,3 +63,396 @@ DEF_LARCH_FTYPE (3, (VOID, USI, USI, SI))
DEF_LARCH_FTYPE (3, (VOID, USI, UDI, SI))
DEF_LARCH_FTYPE (3, (USI, USI, USI, USI))
DEF_LARCH_FTYPE (3, (UDI, UDI, UDI, USI))
+
+DEF_LARCH_FTYPE (1, (DF, DF))
+DEF_LARCH_FTYPE (2, (DF, DF, DF))
+DEF_LARCH_FTYPE (1, (DF, V2DF))
+
+DEF_LARCH_FTYPE (1, (DI, DI))
+DEF_LARCH_FTYPE (1, (DI, SI))
+DEF_LARCH_FTYPE (1, (DI, UQI))
+DEF_LARCH_FTYPE (2, (DI, DI, DI))
+DEF_LARCH_FTYPE (2, (DI, DI, SI))
+DEF_LARCH_FTYPE (3, (DI, DI, SI, SI))
+DEF_LARCH_FTYPE (3, (DI, DI, USI, USI))
+DEF_LARCH_FTYPE (3, (DI, DI, DI, QI))
+DEF_LARCH_FTYPE (3, (DI, DI, V2HI, V2HI))
+DEF_LARCH_FTYPE (3, (DI, DI, V4QI, V4QI))
+DEF_LARCH_FTYPE (2, (DI, POINTER, SI))
+DEF_LARCH_FTYPE (2, (DI, SI, SI))
+DEF_LARCH_FTYPE (2, (DI, USI, USI))
+
+DEF_LARCH_FTYPE (2, (DI, V2DI, UQI))
+
+DEF_LARCH_FTYPE (2, (INT, DF, DF))
+DEF_LARCH_FTYPE (2, (INT, SF, SF))
+
+DEF_LARCH_FTYPE (2, (INT, V2SF, V2SF))
+DEF_LARCH_FTYPE (4, (INT, V2SF, V2SF, V2SF, V2SF))
+
+DEF_LARCH_FTYPE (1, (SF, SF))
+DEF_LARCH_FTYPE (2, (SF, SF, SF))
+DEF_LARCH_FTYPE (1, (SF, V2SF))
+DEF_LARCH_FTYPE (1, (SF, V4SF))
+
+DEF_LARCH_FTYPE (2, (SI, POINTER, SI))
+DEF_LARCH_FTYPE (1, (SI, SI))
+DEF_LARCH_FTYPE (1, (SI, UDI))
+DEF_LARCH_FTYPE (2, (QI, QI, QI))
+DEF_LARCH_FTYPE (2, (HI, HI, HI))
+DEF_LARCH_FTYPE (3, (SI, SI, SI, SI))
+DEF_LARCH_FTYPE (3, (SI, SI, SI, QI))
+DEF_LARCH_FTYPE (1, (SI, UQI))
+DEF_LARCH_FTYPE (1, (SI, UV16QI))
+DEF_LARCH_FTYPE (1, (SI, UV2DI))
+DEF_LARCH_FTYPE (1, (SI, UV4SI))
+DEF_LARCH_FTYPE (1, (SI, UV8HI))
+DEF_LARCH_FTYPE (2, (SI, V16QI, UQI))
+DEF_LARCH_FTYPE (1, (SI, V2HI))
+DEF_LARCH_FTYPE (2, (SI, V2HI, V2HI))
+DEF_LARCH_FTYPE (1, (SI, V4QI))
+DEF_LARCH_FTYPE (2, (SI, V4QI, V4QI))
+DEF_LARCH_FTYPE (2, (SI, V4SI, UQI))
+DEF_LARCH_FTYPE (2, (SI, V8HI, UQI))
+DEF_LARCH_FTYPE (1, (SI, VOID))
+
+DEF_LARCH_FTYPE (2, (UDI, UDI, UDI))
+DEF_LARCH_FTYPE (2, (UDI, UV2SI, UV2SI))
+DEF_LARCH_FTYPE (2, (UDI, V2DI, UQI))
+
+DEF_LARCH_FTYPE (2, (USI, V16QI, UQI))
+DEF_LARCH_FTYPE (2, (USI, V4SI, UQI))
+DEF_LARCH_FTYPE (2, (USI, V8HI, UQI))
+DEF_LARCH_FTYPE (1, (USI, VOID))
+
+DEF_LARCH_FTYPE (2, (UV16QI, UV16QI, UQI))
+DEF_LARCH_FTYPE (2, (UV16QI, UV16QI, USI))
+DEF_LARCH_FTYPE (2, (UV16QI, UV16QI, UV16QI))
+DEF_LARCH_FTYPE (3, (UV16QI, UV16QI, UV16QI, UQI))
+DEF_LARCH_FTYPE (3, (UV16QI, UV16QI, UV16QI, USI))
+DEF_LARCH_FTYPE (3, (UV16QI, UV16QI, UV16QI, UV16QI))
+DEF_LARCH_FTYPE (2, (UV16QI, UV16QI, V16QI))
+
+DEF_LARCH_FTYPE (2, (UV2DI, UV2DI, UQI))
+DEF_LARCH_FTYPE (2, (UV2DI, UV2DI, UV2DI))
+DEF_LARCH_FTYPE (3, (UV2DI, UV2DI, UV2DI, UQI))
+DEF_LARCH_FTYPE (3, (UV2DI, UV2DI, UV2DI, UV2DI))
+DEF_LARCH_FTYPE (3, (UV2DI, UV2DI, UV4SI, UV4SI))
+DEF_LARCH_FTYPE (2, (UV2DI, UV2DI, V2DI))
+DEF_LARCH_FTYPE (2, (UV2DI, UV4SI, UV4SI))
+DEF_LARCH_FTYPE (1, (UV2DI, V2DF))
+
+DEF_LARCH_FTYPE (2, (UV2SI, UV2SI, UQI))
+DEF_LARCH_FTYPE (2, (UV2SI, UV2SI, UV2SI))
+
+DEF_LARCH_FTYPE (2, (UV4HI, UV4HI, UQI))
+DEF_LARCH_FTYPE (2, (UV4HI, UV4HI, USI))
+DEF_LARCH_FTYPE (2, (UV4HI, UV4HI, UV4HI))
+DEF_LARCH_FTYPE (3, (UV4HI, UV4HI, UV4HI, UQI))
+DEF_LARCH_FTYPE (3, (UV4HI, UV4HI, UV4HI, USI))
+DEF_LARCH_FTYPE (1, (UV4HI, UV8QI))
+DEF_LARCH_FTYPE (2, (UV4HI, UV8QI, UV8QI))
+
+DEF_LARCH_FTYPE (2, (UV4SI, UV4SI, UQI))
+DEF_LARCH_FTYPE (2, (UV4SI, UV4SI, UV4SI))
+DEF_LARCH_FTYPE (3, (UV4SI, UV4SI, UV4SI, UQI))
+DEF_LARCH_FTYPE (3, (UV4SI, UV4SI, UV4SI, UV4SI))
+DEF_LARCH_FTYPE (3, (UV4SI, UV4SI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (2, (UV4SI, UV4SI, V4SI))
+DEF_LARCH_FTYPE (2, (UV4SI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (1, (UV4SI, V4SF))
+
+DEF_LARCH_FTYPE (2, (UV8HI, UV16QI, UV16QI))
+DEF_LARCH_FTYPE (2, (UV8HI, UV8HI, UQI))
+DEF_LARCH_FTYPE (3, (UV8HI, UV8HI, UV16QI, UV16QI))
+DEF_LARCH_FTYPE (2, (UV8HI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (3, (UV8HI, UV8HI, UV8HI, UQI))
+DEF_LARCH_FTYPE (3, (UV8HI, UV8HI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (2, (UV8HI, UV8HI, V8HI))
+
+
+
+DEF_LARCH_FTYPE (2, (UV8QI, UV4HI, UV4HI))
+DEF_LARCH_FTYPE (1, (UV8QI, UV8QI))
+DEF_LARCH_FTYPE (2, (UV8QI, UV8QI, UV8QI))
+
+DEF_LARCH_FTYPE (2, (V16QI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (2, (V16QI, CVPOINTER, DI))
+DEF_LARCH_FTYPE (1, (V16QI, HI))
+DEF_LARCH_FTYPE (1, (V16QI, SI))
+DEF_LARCH_FTYPE (2, (V16QI, UV16QI, UQI))
+DEF_LARCH_FTYPE (2, (V16QI, UV16QI, UV16QI))
+DEF_LARCH_FTYPE (1, (V16QI, V16QI))
+DEF_LARCH_FTYPE (2, (V16QI, V16QI, QI))
+DEF_LARCH_FTYPE (2, (V16QI, V16QI, SI))
+DEF_LARCH_FTYPE (2, (V16QI, V16QI, USI))
+DEF_LARCH_FTYPE (2, (V16QI, V16QI, UQI))
+DEF_LARCH_FTYPE (3, (V16QI, V16QI, UQI, SI))
+DEF_LARCH_FTYPE (3, (V16QI, V16QI, UQI, V16QI))
+DEF_LARCH_FTYPE (2, (V16QI, V16QI, V16QI))
+DEF_LARCH_FTYPE (3, (V16QI, V16QI, V16QI, SI))
+DEF_LARCH_FTYPE (3, (V16QI, V16QI, V16QI, UQI))
+DEF_LARCH_FTYPE (4, (V16QI, V16QI, V16QI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V16QI, V16QI, V16QI, USI))
+DEF_LARCH_FTYPE (3, (V16QI, V16QI, V16QI, V16QI))
+
+
+DEF_LARCH_FTYPE (1, (V2DF, DF))
+DEF_LARCH_FTYPE (1, (V2DF, UV2DI))
+DEF_LARCH_FTYPE (1, (V2DF, V2DF))
+DEF_LARCH_FTYPE (2, (V2DF, V2DF, V2DF))
+DEF_LARCH_FTYPE (3, (V2DF, V2DF, V2DF, V2DF))
+DEF_LARCH_FTYPE (2, (V2DF, V2DF, V2DI))
+DEF_LARCH_FTYPE (1, (V2DF, V2DI))
+DEF_LARCH_FTYPE (1, (V2DF, V4SF))
+DEF_LARCH_FTYPE (1, (V2DF, V4SI))
+
+DEF_LARCH_FTYPE (2, (V2DI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (1, (V2DI, DI))
+DEF_LARCH_FTYPE (1, (V2DI, HI))
+DEF_LARCH_FTYPE (2, (V2DI, UV2DI, UQI))
+DEF_LARCH_FTYPE (2, (V2DI, UV2DI, UV2DI))
+DEF_LARCH_FTYPE (2, (V2DI, UV4SI, UV4SI))
+DEF_LARCH_FTYPE (1, (V2DI, V2DF))
+DEF_LARCH_FTYPE (2, (V2DI, V2DF, V2DF))
+DEF_LARCH_FTYPE (1, (V2DI, V2DI))
+DEF_LARCH_FTYPE (1, (UV2DI, UV2DI))
+DEF_LARCH_FTYPE (2, (V2DI, V2DI, QI))
+DEF_LARCH_FTYPE (2, (V2DI, V2DI, SI))
+DEF_LARCH_FTYPE (2, (V2DI, V2DI, UQI))
+DEF_LARCH_FTYPE (2, (V2DI, V2DI, USI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, UQI, DI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, UQI, V2DI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, UV4SI, UV4SI))
+DEF_LARCH_FTYPE (2, (V2DI, V2DI, V2DI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, V2DI, SI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, V2DI, UQI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, V2DI, USI))
+DEF_LARCH_FTYPE (4, (V2DI, V2DI, V2DI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, V2DI, V2DI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, V4SI, V4SI))
+DEF_LARCH_FTYPE (2, (V2DI, V4SI, V4SI))
+
+DEF_LARCH_FTYPE (1, (V2HI, SI))
+DEF_LARCH_FTYPE (2, (V2HI, SI, SI))
+DEF_LARCH_FTYPE (3, (V2HI, SI, SI, SI))
+DEF_LARCH_FTYPE (1, (V2HI, V2HI))
+DEF_LARCH_FTYPE (2, (V2HI, V2HI, SI))
+DEF_LARCH_FTYPE (2, (V2HI, V2HI, V2HI))
+DEF_LARCH_FTYPE (1, (V2HI, V4QI))
+DEF_LARCH_FTYPE (2, (V2HI, V4QI, V2HI))
+
+DEF_LARCH_FTYPE (2, (V2SF, SF, SF))
+DEF_LARCH_FTYPE (1, (V2SF, V2SF))
+DEF_LARCH_FTYPE (2, (V2SF, V2SF, V2SF))
+DEF_LARCH_FTYPE (3, (V2SF, V2SF, V2SF, INT))
+DEF_LARCH_FTYPE (4, (V2SF, V2SF, V2SF, V2SF, V2SF))
+
+DEF_LARCH_FTYPE (2, (V2SI, V2SI, UQI))
+DEF_LARCH_FTYPE (2, (V2SI, V2SI, V2SI))
+DEF_LARCH_FTYPE (2, (V2SI, V4HI, V4HI))
+
+DEF_LARCH_FTYPE (2, (V4HI, V2SI, V2SI))
+DEF_LARCH_FTYPE (2, (V4HI, V4HI, UQI))
+DEF_LARCH_FTYPE (2, (V4HI, V4HI, USI))
+DEF_LARCH_FTYPE (2, (V4HI, V4HI, V4HI))
+DEF_LARCH_FTYPE (3, (V4HI, V4HI, V4HI, UQI))
+DEF_LARCH_FTYPE (3, (V4HI, V4HI, V4HI, USI))
+
+DEF_LARCH_FTYPE (1, (V4QI, SI))
+DEF_LARCH_FTYPE (2, (V4QI, V2HI, V2HI))
+DEF_LARCH_FTYPE (1, (V4QI, V4QI))
+DEF_LARCH_FTYPE (2, (V4QI, V4QI, SI))
+DEF_LARCH_FTYPE (2, (V4QI, V4QI, V4QI))
+
+DEF_LARCH_FTYPE (1, (V4SF, SF))
+DEF_LARCH_FTYPE (1, (V4SF, UV4SI))
+DEF_LARCH_FTYPE (2, (V4SF, V2DF, V2DF))
+DEF_LARCH_FTYPE (1, (V4SF, V4SF))
+DEF_LARCH_FTYPE (2, (V4SF, V4SF, V4SF))
+DEF_LARCH_FTYPE (3, (V4SF, V4SF, V4SF, V4SF))
+DEF_LARCH_FTYPE (2, (V4SF, V4SF, V4SI))
+DEF_LARCH_FTYPE (1, (V4SF, V4SI))
+DEF_LARCH_FTYPE (1, (V4SF, V8HI))
+
+DEF_LARCH_FTYPE (2, (V4SI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (1, (V4SI, HI))
+DEF_LARCH_FTYPE (1, (V4SI, SI))
+DEF_LARCH_FTYPE (2, (V4SI, UV4SI, UQI))
+DEF_LARCH_FTYPE (2, (V4SI, UV4SI, UV4SI))
+DEF_LARCH_FTYPE (2, (V4SI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (2, (V4SI, V2DF, V2DF))
+DEF_LARCH_FTYPE (1, (V4SI, V4SF))
+DEF_LARCH_FTYPE (2, (V4SI, V4SF, V4SF))
+DEF_LARCH_FTYPE (1, (V4SI, V4SI))
+DEF_LARCH_FTYPE (2, (V4SI, V4SI, QI))
+DEF_LARCH_FTYPE (2, (V4SI, V4SI, SI))
+DEF_LARCH_FTYPE (2, (V4SI, V4SI, UQI))
+DEF_LARCH_FTYPE (2, (V4SI, V4SI, USI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, UQI, SI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, UQI, V4SI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (2, (V4SI, V4SI, V4SI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, V4SI, SI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, V4SI, UQI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, V4SI, USI))
+DEF_LARCH_FTYPE (4, (V4SI, V4SI, V4SI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, V4SI, V4SI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, V8HI, V8HI))
+DEF_LARCH_FTYPE (2, (V4SI, V8HI, V8HI))
+
+DEF_LARCH_FTYPE (2, (V8HI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (1, (V8HI, HI))
+DEF_LARCH_FTYPE (1, (V8HI, SI))
+DEF_LARCH_FTYPE (2, (V8HI, UV16QI, UV16QI))
+DEF_LARCH_FTYPE (2, (V8HI, UV8HI, UQI))
+DEF_LARCH_FTYPE (2, (V8HI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (2, (V8HI, V16QI, V16QI))
+DEF_LARCH_FTYPE (2, (V8HI, V4SF, V4SF))
+DEF_LARCH_FTYPE (1, (V8HI, V8HI))
+DEF_LARCH_FTYPE (2, (V8HI, V8HI, QI))
+DEF_LARCH_FTYPE (2, (V8HI, V8HI, SI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, SI, UQI))
+DEF_LARCH_FTYPE (2, (V8HI, V8HI, UQI))
+DEF_LARCH_FTYPE (2, (V8HI, V8HI, USI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, UQI, SI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, UQI, V8HI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, UV16QI, UV16QI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, V16QI, V16QI))
+DEF_LARCH_FTYPE (2, (V8HI, V8HI, V8HI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, V8HI, SI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, V8HI, UQI))
+DEF_LARCH_FTYPE (4, (V8HI, V8HI, V8HI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, V8HI, USI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, V8HI, V8HI))
+
+DEF_LARCH_FTYPE (2, (V8QI, V4HI, V4HI))
+DEF_LARCH_FTYPE (1, (V8QI, V8QI))
+DEF_LARCH_FTYPE (2, (V8QI, V8QI, V8QI))
+
+DEF_LARCH_FTYPE (2, (VOID, SI, CVPOINTER))
+DEF_LARCH_FTYPE (2, (VOID, SI, SI))
+DEF_LARCH_FTYPE (2, (VOID, UQI, SI))
+DEF_LARCH_FTYPE (2, (VOID, USI, UQI))
+DEF_LARCH_FTYPE (1, (VOID, UHI))
+DEF_LARCH_FTYPE (3, (VOID, V16QI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (3, (VOID, V16QI, CVPOINTER, DI))
+DEF_LARCH_FTYPE (3, (VOID, V2DF, POINTER, SI))
+DEF_LARCH_FTYPE (3, (VOID, V2DI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (2, (VOID, V2HI, V2HI))
+DEF_LARCH_FTYPE (2, (VOID, V4QI, V4QI))
+DEF_LARCH_FTYPE (3, (VOID, V4SF, POINTER, SI))
+DEF_LARCH_FTYPE (3, (VOID, V4SI, CVPOINTER, SI))
+DEF_LARCH_FTYPE (3, (VOID, V8HI, CVPOINTER, SI))
+
+DEF_LARCH_FTYPE (1, (V8HI, V16QI))
+DEF_LARCH_FTYPE (1, (V4SI, V16QI))
+DEF_LARCH_FTYPE (1, (V2DI, V16QI))
+DEF_LARCH_FTYPE (1, (V4SI, V8HI))
+DEF_LARCH_FTYPE (1, (V2DI, V8HI))
+DEF_LARCH_FTYPE (1, (V2DI, V4SI))
+DEF_LARCH_FTYPE (1, (UV8HI, V16QI))
+DEF_LARCH_FTYPE (1, (UV4SI, V16QI))
+DEF_LARCH_FTYPE (1, (UV2DI, V16QI))
+DEF_LARCH_FTYPE (1, (UV4SI, V8HI))
+DEF_LARCH_FTYPE (1, (UV2DI, V8HI))
+DEF_LARCH_FTYPE (1, (UV2DI, V4SI))
+DEF_LARCH_FTYPE (1, (UV8HI, UV16QI))
+DEF_LARCH_FTYPE (1, (UV4SI, UV16QI))
+DEF_LARCH_FTYPE (1, (UV2DI, UV16QI))
+DEF_LARCH_FTYPE (1, (UV4SI, UV8HI))
+DEF_LARCH_FTYPE (1, (UV2DI, UV8HI))
+DEF_LARCH_FTYPE (1, (UV2DI, UV4SI))
+DEF_LARCH_FTYPE (2, (UV8HI, V16QI, V16QI))
+DEF_LARCH_FTYPE (2, (UV4SI, V8HI, V8HI))
+DEF_LARCH_FTYPE (2, (UV2DI, V4SI, V4SI))
+DEF_LARCH_FTYPE (2, (V8HI, V16QI, UQI))
+DEF_LARCH_FTYPE (2, (V4SI, V8HI, UQI))
+DEF_LARCH_FTYPE (2, (V2DI, V4SI, UQI))
+DEF_LARCH_FTYPE (2, (UV8HI, UV16QI, UQI))
+DEF_LARCH_FTYPE (2, (UV4SI, UV8HI, UQI))
+DEF_LARCH_FTYPE (2, (UV2DI, UV4SI, UQI))
+DEF_LARCH_FTYPE (2, (V16QI, V8HI, V8HI))
+DEF_LARCH_FTYPE (2, (V8HI, V4SI, V4SI))
+DEF_LARCH_FTYPE (2, (V4SI, V2DI, V2DI))
+DEF_LARCH_FTYPE (2, (UV16QI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (2, (UV8HI, UV4SI, UV4SI))
+DEF_LARCH_FTYPE (2, (UV4SI, UV2DI, UV2DI))
+DEF_LARCH_FTYPE (2, (V16QI, V8HI, UQI))
+DEF_LARCH_FTYPE (2, (V8HI, V4SI, UQI))
+DEF_LARCH_FTYPE (2, (V4SI, V2DI, UQI))
+DEF_LARCH_FTYPE (2, (UV16QI, UV8HI, UQI))
+DEF_LARCH_FTYPE (2, (UV8HI, UV4SI, UQI))
+DEF_LARCH_FTYPE (2, (UV4SI, UV2DI, UQI))
+DEF_LARCH_FTYPE (2, (V16QI, V16QI, DI))
+DEF_LARCH_FTYPE (2, (V16QI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V16QI, V16QI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, UQI, UQI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, UQI, UQI))
+DEF_LARCH_FTYPE (2, (V4SF, V2DI, V2DI))
+DEF_LARCH_FTYPE (1, (V2DI, V4SF))
+DEF_LARCH_FTYPE (2, (V2DI, UQI, USI))
+DEF_LARCH_FTYPE (2, (V2DI, UQI, UQI))
+DEF_LARCH_FTYPE (4, (VOID, SI, UQI, V16QI, CVPOINTER))
+DEF_LARCH_FTYPE (4, (VOID, SI, UQI, V8HI, CVPOINTER))
+DEF_LARCH_FTYPE (4, (VOID, SI, UQI, V4SI, CVPOINTER))
+DEF_LARCH_FTYPE (4, (VOID, SI, UQI, V2DI, CVPOINTER))
+DEF_LARCH_FTYPE (2, (V16QI, SI, CVPOINTER))
+DEF_LARCH_FTYPE (2, (V8HI, SI, CVPOINTER))
+DEF_LARCH_FTYPE (2, (V4SI, SI, CVPOINTER))
+DEF_LARCH_FTYPE (2, (V2DI, SI, CVPOINTER))
+DEF_LARCH_FTYPE (2, (V8HI, UV16QI, V16QI))
+DEF_LARCH_FTYPE (2, (V16QI, V16QI, UV16QI))
+DEF_LARCH_FTYPE (2, (UV16QI, V16QI, UV16QI))
+DEF_LARCH_FTYPE (2, (V8HI, V8HI, UV8HI))
+DEF_LARCH_FTYPE (2, (UV8HI, V8HI, UV8HI))
+DEF_LARCH_FTYPE (2, (V4SI, V4SI, UV4SI))
+DEF_LARCH_FTYPE (2, (UV4SI, V4SI, UV4SI))
+DEF_LARCH_FTYPE (2, (V4SI, V16QI, V16QI))
+DEF_LARCH_FTYPE (2, (V4SI, UV16QI, V16QI))
+DEF_LARCH_FTYPE (2, (UV4SI, UV16QI, UV16QI))
+DEF_LARCH_FTYPE (2, (V2DI, V2DI, UV2DI))
+DEF_LARCH_FTYPE (2, (UV2DI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (2, (V4SI, UV8HI, V8HI))
+DEF_LARCH_FTYPE (2, (V2DI, UV4SI, V4SI))
+DEF_LARCH_FTYPE (2, (V2DI, UV2DI, V2DI))
+DEF_LARCH_FTYPE (2, (V2DI, V8HI, V8HI))
+DEF_LARCH_FTYPE (2, (V2DI, UV8HI, V8HI))
+DEF_LARCH_FTYPE (2, (UV2DI, V2DI, UV2DI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, UV8HI, V8HI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, UV2DI, V2DI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, UV4SI, V4SI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, V8HI, V8HI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, UV8HI, V8HI))
+DEF_LARCH_FTYPE (3, (UV2DI, UV2DI, UV8HI, UV8HI))
+DEF_LARCH_FTYPE (3, (V8HI, V8HI, UV16QI, V16QI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, V16QI, V16QI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, UV16QI, V16QI))
+DEF_LARCH_FTYPE (3, (UV4SI, UV4SI, UV16QI, UV16QI))
+
+DEF_LARCH_FTYPE (4, (VOID, V16QI, CVPOINTER, SI, UQI))
+DEF_LARCH_FTYPE (4, (VOID, V8HI, CVPOINTER, SI, UQI))
+DEF_LARCH_FTYPE (4, (VOID, V4SI, CVPOINTER, SI, UQI))
+DEF_LARCH_FTYPE (4, (VOID, V2DI, CVPOINTER, SI, UQI))
+
+DEF_LARCH_FTYPE (2, (DI, V16QI, UQI))
+DEF_LARCH_FTYPE (2, (DI, V8HI, UQI))
+DEF_LARCH_FTYPE (2, (DI, V4SI, UQI))
+DEF_LARCH_FTYPE (2, (UDI, V16QI, UQI))
+DEF_LARCH_FTYPE (2, (UDI, V8HI, UQI))
+DEF_LARCH_FTYPE (2, (UDI, V4SI, UQI))
+
+DEF_LARCH_FTYPE (3, (UV16QI, UV16QI, V16QI, USI))
+DEF_LARCH_FTYPE (3, (UV8HI, UV8HI, V8HI, USI))
+DEF_LARCH_FTYPE (3, (UV4SI, UV4SI, V4SI, USI))
+DEF_LARCH_FTYPE (3, (UV2DI, UV2DI, V2DI, USI))
+
+DEF_LARCH_FTYPE (1, (BOOLEAN, V16QI))
+DEF_LARCH_FTYPE (2, (V16QI, CVPOINTER, CVPOINTER))
+DEF_LARCH_FTYPE (3, (VOID, V16QI, CVPOINTER, CVPOINTER))
+
+DEF_LARCH_FTYPE (3, (V16QI, V16QI, SI, UQI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, SI, UQI))
+DEF_LARCH_FTYPE (3, (V2DI, V2DI, DI, UQI))
+DEF_LARCH_FTYPE (3, (V4SI, V4SI, SI, UQI))
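Each DEF_LARCH_FTYPE entry above pairs an argument count with a (return-type, argument-types...) tuple; the builtin machinery expands these into the C prototypes of the __builtin_lsx_* functions. As a rough illustration of that correspondence (a sketch, not generated output), the (V16QI, V16QI, V16QI) signature used by many two-operand byte operations describes prototypes of the form:

  /* Illustrative only: GCC synthesises such a prototype internally from
     DEF_LARCH_FTYPE (2, (V16QI, V16QI, V16QI)).  */
  typedef signed char v16i8 __attribute__ ((vector_size (16), aligned (16)));

  v16i8 __builtin_lsx_vadd_b (v16i8, v16i8);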
diff --git a/gcc/config/loongarch/lsxintrin.h b/gcc/config/loongarch/lsxintrin.h
new file mode 100644
index 000000000..ec4206990
--- /dev/null
+++ b/gcc/config/loongarch/lsxintrin.h
@@ -0,0 +1,5181 @@
+/* LARCH Loongson SX intrinsics include file.
+
+ Copyright (C) 2018 Free Software Foundation, Inc.
+
+ This file is part of GCC.
+
+ GCC is free software; you can redistribute it and/or modify it
+ under the terms of the GNU General Public License as published
+ by the Free Software Foundation; either version 3, or (at your
+ option) any later version.
+
+ GCC is distributed in the hope that it will be useful, but WITHOUT
+ ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
+ or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
+ License for more details.
+
+ Under Section 7 of GPL version 3, you are granted additional
+ permissions described in the GCC Runtime Library Exception, version
+ 3.1, as published by the Free Software Foundation.
+
+ You should have received a copy of the GNU General Public License and
+ a copy of the GCC Runtime Library Exception along with this program;
+ see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
+ <http://www.gnu.org/licenses/>. */
+
+#ifndef _GCC_LOONGSON_SXINTRIN_H
+#define _GCC_LOONGSON_SXINTRIN_H 1
+
+#if defined(__loongarch_sx)
+typedef signed char v16i8 __attribute__ ((vector_size(16), aligned(16)));
+typedef signed char v16i8_b __attribute__ ((vector_size(16), aligned(1)));
+typedef unsigned char v16u8 __attribute__ ((vector_size(16), aligned(16)));
+typedef unsigned char v16u8_b __attribute__ ((vector_size(16), aligned(1)));
+typedef short v8i16 __attribute__ ((vector_size(16), aligned(16)));
+typedef short v8i16_h __attribute__ ((vector_size(16), aligned(2)));
+typedef unsigned short v8u16 __attribute__ ((vector_size(16), aligned(16)));
+typedef unsigned short v8u16_h __attribute__ ((vector_size(16), aligned(2)));
+typedef int v4i32 __attribute__ ((vector_size(16), aligned(16)));
+typedef int v4i32_w __attribute__ ((vector_size(16), aligned(4)));
+typedef unsigned int v4u32 __attribute__ ((vector_size(16), aligned(16)));
+typedef unsigned int v4u32_w __attribute__ ((vector_size(16), aligned(4)));
+typedef long long v2i64 __attribute__ ((vector_size(16), aligned(16)));
+typedef long long v2i64_d __attribute__ ((vector_size(16), aligned(8)));
+typedef unsigned long long v2u64 __attribute__ ((vector_size(16), aligned(16)));
+typedef unsigned long long v2u64_d __attribute__ ((vector_size(16), aligned(8)));
+typedef float v4f32 __attribute__ ((vector_size(16), aligned(16)));
+typedef float v4f32_w __attribute__ ((vector_size(16), aligned(4)));
+typedef double v2f64 __attribute__ ((vector_size(16), aligned(16)));
+typedef double v2f64_d __attribute__ ((vector_size(16), aligned(8)));
+
+typedef long long __m128i __attribute__ ((__vector_size__ (16), __may_alias__));
+typedef float __m128 __attribute__ ((__vector_size__ (16), __may_alias__));
+typedef double __m128d __attribute__ ((__vector_size__ (16), __may_alias__));
+
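The wrappers below all follow one pattern: the public argument and return types are the generic __m128i/__m128/__m128d, and each wrapper casts to the element-typed vector (v16i8, v8i16, ...) expected by the underlying __builtin_lsx_* before casting the result back. A brief usage sketch built only from intrinsics defined in this header (the helper name is illustrative):

  #include <lsxintrin.h>

  /* Add two byte vectors, then shift every byte left by one.  All
     element-type reinterpretation happens inside the wrappers.  */
  static inline __m128i
  add_then_double (__m128i a, __m128i b)
  {
    __m128i sum = __lsx_vadd_b (a, b);
    return __lsx_vslli_b (sum, 1);   /* shift count must be a constant 0..7.  */
  }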
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsll_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsll_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsll_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsll_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsll_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsll_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsll_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsll_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, ui3. */
+/* Data types in instruction templates: V16QI, V16QI, UQI. */
+#define __lsx_vslli_b(/*__m128i*/ _1, /*ui3*/ _2) \
+ ((__m128i)__builtin_lsx_vslli_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: V8HI, V8HI, UQI. */
+#define __lsx_vslli_h(/*__m128i*/ _1, /*ui4*/ _2) \
+ ((__m128i)__builtin_lsx_vslli_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V4SI, V4SI, UQI. */
+#define __lsx_vslli_w(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vslli_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: V2DI, V2DI, UQI. */
+#define __lsx_vslli_d(/*__m128i*/ _1, /*ui6*/ _2) \
+ ((__m128i)__builtin_lsx_vslli_d ((v2i64)(_1), (_2)))
+
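Note that these immediate forms are macros rather than inline functions: the shift count must reach the builtin as an integer constant expression, since the expander expects a CONST_INT in the ui3/ui4/ui5/ui6 range (compare the range checks and error_opno handling earlier in this patch). A hedged sketch of the difference between the immediate and register forms (the helper name is illustrative):

  /* Constant counts use the __lsx_vslli_* macros; per-element variable
     counts must use the vector-register form instead.  */
  static inline __m128i
  shift_both_ways (__m128i v, __m128i counts)
  {
    __m128i a = __lsx_vslli_w (v, 3);      /* OK: 3 is a constant ui5 (0..31).  */
    __m128i b = __lsx_vsll_w (v, counts);  /* counts taken from a vector register.  */
    return __lsx_vadd_w (a, b);
  }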
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsra_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsra_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsra_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsra_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsra_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsra_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsra_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsra_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, ui3. */
+/* Data types in instruction templates: V16QI, V16QI, UQI. */
+#define __lsx_vsrai_b(/*__m128i*/ _1, /*ui3*/ _2) \
+ ((__m128i)__builtin_lsx_vsrai_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: V8HI, V8HI, UQI. */
+#define __lsx_vsrai_h(/*__m128i*/ _1, /*ui4*/ _2) \
+ ((__m128i)__builtin_lsx_vsrai_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V4SI, V4SI, UQI. */
+#define __lsx_vsrai_w(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vsrai_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: V2DI, V2DI, UQI. */
+#define __lsx_vsrai_d(/*__m128i*/ _1, /*ui6*/ _2) \
+ ((__m128i)__builtin_lsx_vsrai_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrar_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrar_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrar_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrar_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrar_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrar_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrar_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrar_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, ui3. */
+/* Data types in instruction templates: V16QI, V16QI, UQI. */
+#define __lsx_vsrari_b(/*__m128i*/ _1, /*ui3*/ _2) \
+ ((__m128i)__builtin_lsx_vsrari_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: V8HI, V8HI, UQI. */
+#define __lsx_vsrari_h(/*__m128i*/ _1, /*ui4*/ _2) \
+ ((__m128i)__builtin_lsx_vsrari_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V4SI, V4SI, UQI. */
+#define __lsx_vsrari_w(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vsrari_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: V2DI, V2DI, UQI. */
+#define __lsx_vsrari_d(/*__m128i*/ _1, /*ui6*/ _2) \
+ ((__m128i)__builtin_lsx_vsrari_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrl_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrl_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrl_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrl_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrl_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrl_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrl_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrl_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, ui3. */
+/* Data types in instruction templates: V16QI, V16QI, UQI. */
+#define __lsx_vsrli_b(/*__m128i*/ _1, /*ui3*/ _2) \
+ ((__m128i)__builtin_lsx_vsrli_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: V8HI, V8HI, UQI. */
+#define __lsx_vsrli_h(/*__m128i*/ _1, /*ui4*/ _2) \
+ ((__m128i)__builtin_lsx_vsrli_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V4SI, V4SI, UQI. */
+#define __lsx_vsrli_w(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vsrli_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: V2DI, V2DI, UQI. */
+#define __lsx_vsrli_d(/*__m128i*/ _1, /*ui6*/ _2) \
+ ((__m128i)__builtin_lsx_vsrli_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrlr_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrlr_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrlr_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrlr_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrlr_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrlr_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrlr_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrlr_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, ui3. */
+/* Data types in instruction templates: V16QI, V16QI, UQI. */
+#define __lsx_vsrlri_b(/*__m128i*/ _1, /*ui3*/ _2) \
+ ((__m128i)__builtin_lsx_vsrlri_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: V8HI, V8HI, UQI. */
+#define __lsx_vsrlri_h(/*__m128i*/ _1, /*ui4*/ _2) \
+ ((__m128i)__builtin_lsx_vsrlri_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V4SI, V4SI, UQI. */
+#define __lsx_vsrlri_w(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vsrlri_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: V2DI, V2DI, UQI. */
+#define __lsx_vsrlri_d(/*__m128i*/ _1, /*ui6*/ _2) \
+ ((__m128i)__builtin_lsx_vsrlri_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitclr_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vbitclr_b ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitclr_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vbitclr_h ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitclr_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vbitclr_w ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitclr_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vbitclr_d ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, ui3. */
+/* Data types in instruction templates: UV16QI, UV16QI, UQI. */
+#define __lsx_vbitclri_b(/*__m128i*/ _1, /*ui3*/ _2) \
+ ((__m128i)__builtin_lsx_vbitclri_b ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: UV8HI, UV8HI, UQI. */
+#define __lsx_vbitclri_h(/*__m128i*/ _1, /*ui4*/ _2) \
+ ((__m128i)__builtin_lsx_vbitclri_h ((v8u16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: UV4SI, UV4SI, UQI. */
+#define __lsx_vbitclri_w(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vbitclri_w ((v4u32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: UV2DI, UV2DI, UQI. */
+#define __lsx_vbitclri_d(/*__m128i*/ _1, /*ui6*/ _2) \
+ ((__m128i)__builtin_lsx_vbitclri_d ((v2u64)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitset_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vbitset_b ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitset_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vbitset_h ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitset_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vbitset_w ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitset_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vbitset_d ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, ui3. */
+/* Data types in instruction templates: UV16QI, UV16QI, UQI. */
+#define __lsx_vbitseti_b(/*__m128i*/ _1, /*ui3*/ _2) \
+ ((__m128i)__builtin_lsx_vbitseti_b ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: UV8HI, UV8HI, UQI. */
+#define __lsx_vbitseti_h(/*__m128i*/ _1, /*ui4*/ _2) \
+ ((__m128i)__builtin_lsx_vbitseti_h ((v8u16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: UV4SI, UV4SI, UQI. */
+#define __lsx_vbitseti_w(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vbitseti_w ((v4u32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: UV2DI, UV2DI, UQI. */
+#define __lsx_vbitseti_d(/*__m128i*/ _1, /*ui6*/ _2) \
+ ((__m128i)__builtin_lsx_vbitseti_d ((v2u64)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitrev_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vbitrev_b ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitrev_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vbitrev_h ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitrev_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vbitrev_w ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitrev_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vbitrev_d ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, ui3. */
+/* Data types in instruction templates: UV16QI, UV16QI, UQI. */
+#define __lsx_vbitrevi_b(/*__m128i*/ _1, /*ui3*/ _2) \
+ ((__m128i)__builtin_lsx_vbitrevi_b ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: UV8HI, UV8HI, UQI. */
+#define __lsx_vbitrevi_h(/*__m128i*/ _1, /*ui4*/ _2) \
+ ((__m128i)__builtin_lsx_vbitrevi_h ((v8u16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: UV4SI, UV4SI, UQI. */
+#define __lsx_vbitrevi_w(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vbitrevi_w ((v4u32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: UV2DI, UV2DI, UQI. */
+#define __lsx_vbitrevi_d(/*__m128i*/ _1, /*ui6*/ _2) \
+ ((__m128i)__builtin_lsx_vbitrevi_d ((v2u64)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vadd_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vadd_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vadd_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vadd_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vadd_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vadd_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vadd_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vadd_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V16QI, V16QI, UQI. */
+#define __lsx_vaddi_bu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vaddi_bu ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V8HI, V8HI, UQI. */
+#define __lsx_vaddi_hu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vaddi_hu ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V4SI, V4SI, UQI. */
+#define __lsx_vaddi_wu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vaddi_wu ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V2DI, V2DI, UQI. */
+#define __lsx_vaddi_du(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vaddi_du ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsub_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsub_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsub_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsub_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsub_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsub_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsub_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsub_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V16QI, V16QI, UQI. */
+#define __lsx_vsubi_bu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vsubi_bu ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V8HI, V8HI, UQI. */
+#define __lsx_vsubi_hu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vsubi_hu ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V4SI, V4SI, UQI. */
+#define __lsx_vsubi_wu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vsubi_wu ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V2DI, V2DI, UQI. */
+#define __lsx_vsubi_du(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vsubi_du ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmax_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmax_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmax_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmax_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmax_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmax_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmax_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmax_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V16QI, V16QI, QI. */
+#define __lsx_vmaxi_b(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vmaxi_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V8HI, V8HI, QI. */
+#define __lsx_vmaxi_h(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vmaxi_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V4SI, V4SI, QI. */
+#define __lsx_vmaxi_w(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vmaxi_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V2DI, V2DI, QI. */
+#define __lsx_vmaxi_d(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vmaxi_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmax_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmax_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmax_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmax_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmax_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmax_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmax_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmax_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: UV16QI, UV16QI, UQI. */
+#define __lsx_vmaxi_bu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vmaxi_bu ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: UV8HI, UV8HI, UQI. */
+#define __lsx_vmaxi_hu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vmaxi_hu ((v8u16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: UV4SI, UV4SI, UQI. */
+#define __lsx_vmaxi_wu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vmaxi_wu ((v4u32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: UV2DI, UV2DI, UQI. */
+#define __lsx_vmaxi_du(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vmaxi_du ((v2u64)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmin_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmin_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmin_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmin_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmin_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmin_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmin_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmin_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V16QI, V16QI, QI. */
+#define __lsx_vmini_b(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vmini_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V8HI, V8HI, QI. */
+#define __lsx_vmini_h(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vmini_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V4SI, V4SI, QI. */
+#define __lsx_vmini_w(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vmini_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V2DI, V2DI, QI. */
+#define __lsx_vmini_d(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vmini_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmin_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmin_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmin_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmin_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmin_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmin_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmin_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmin_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: UV16QI, UV16QI, UQI. */
+#define __lsx_vmini_bu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vmini_bu ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: UV8HI, UV8HI, UQI. */
+#define __lsx_vmini_hu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vmini_hu ((v8u16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: UV4SI, UV4SI, UQI. */
+#define __lsx_vmini_wu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vmini_wu ((v4u32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: UV2DI, UV2DI, UQI. */
+#define __lsx_vmini_du(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vmini_du ((v2u64)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vseq_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vseq_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vseq_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vseq_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vseq_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vseq_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vseq_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vseq_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V16QI, V16QI, QI. */
+#define __lsx_vseqi_b(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vseqi_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V8HI, V8HI, QI. */
+#define __lsx_vseqi_h(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vseqi_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V4SI, V4SI, QI. */
+#define __lsx_vseqi_w(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vseqi_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V2DI, V2DI, QI. */
+#define __lsx_vseqi_d(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vseqi_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V16QI, V16QI, QI. */
+#define __lsx_vslti_b(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vslti_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vslt_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vslt_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vslt_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vslt_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vslt_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vslt_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vslt_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vslt_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V8HI, V8HI, QI. */
+#define __lsx_vslti_h(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vslti_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V4SI, V4SI, QI. */
+#define __lsx_vslti_w(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vslti_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V2DI, V2DI, QI. */
+#define __lsx_vslti_d(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vslti_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vslt_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vslt_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vslt_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vslt_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vslt_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vslt_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vslt_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vslt_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V16QI, UV16QI, UQI. */
+#define __lsx_vslti_bu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vslti_bu ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V8HI, UV8HI, UQI. */
+#define __lsx_vslti_hu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vslti_hu ((v8u16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V4SI, UV4SI, UQI. */
+#define __lsx_vslti_wu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vslti_wu ((v4u32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V2DI, UV2DI, UQI. */
+#define __lsx_vslti_du(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vslti_du ((v2u64)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsle_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsle_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsle_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsle_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsle_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsle_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsle_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsle_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V16QI, V16QI, QI. */
+#define __lsx_vslei_b(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vslei_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V8HI, V8HI, QI. */
+#define __lsx_vslei_h(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vslei_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V4SI, V4SI, QI. */
+#define __lsx_vslei_w(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vslei_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, si5. */
+/* Data types in instruction templates: V2DI, V2DI, QI. */
+#define __lsx_vslei_d(/*__m128i*/ _1, /*si5*/ _2) \
+ ((__m128i)__builtin_lsx_vslei_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsle_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsle_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsle_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsle_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsle_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsle_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsle_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsle_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V16QI, UV16QI, UQI. */
+#define __lsx_vslei_bu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vslei_bu ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V8HI, UV8HI, UQI. */
+#define __lsx_vslei_hu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vslei_hu ((v8u16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V4SI, UV4SI, UQI. */
+#define __lsx_vslei_wu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vslei_wu ((v4u32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V2DI, UV2DI, UQI. */
+#define __lsx_vslei_du(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vslei_du ((v2u64)(_1), (_2)))
+
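+/* Usage sketch for the compare families above (illustrative; variable
+   names are hypothetical).  Each lane produced by the vslt, vslti, vsle
+   and vslei intrinsics is set to all ones when the relation holds and to
+   all zeros otherwise, so the result is directly usable as a bit mask:
+
+     __m128i x  = __lsx_vreplgr2vr_w (5);
+     __m128i lt = __lsx_vslti_w (x, 9);   // 5 <  9 -> each word 0xffffffff
+     __m128i le = __lsx_vsle_w (x, x);    // 5 <= 5 -> each word 0xffffffff
+*/
+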
+/* Assembly instruction format: vd, vj, ui3. */
+/* Data types in instruction templates: V16QI, V16QI, UQI. */
+#define __lsx_vsat_b(/*__m128i*/ _1, /*ui3*/ _2) \
+ ((__m128i)__builtin_lsx_vsat_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: V8HI, V8HI, UQI. */
+#define __lsx_vsat_h(/*__m128i*/ _1, /*ui4*/ _2) \
+ ((__m128i)__builtin_lsx_vsat_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V4SI, V4SI, UQI. */
+#define __lsx_vsat_w(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vsat_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: V2DI, V2DI, UQI. */
+#define __lsx_vsat_d(/*__m128i*/ _1, /*ui6*/ _2) \
+ ((__m128i)__builtin_lsx_vsat_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui3. */
+/* Data types in instruction templates: UV16QI, UV16QI, UQI. */
+#define __lsx_vsat_bu(/*__m128i*/ _1, /*ui3*/ _2) \
+ ((__m128i)__builtin_lsx_vsat_bu ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: UV8HI, UV8HI, UQI. */
+#define __lsx_vsat_hu(/*__m128i*/ _1, /*ui4*/ _2) \
+ ((__m128i)__builtin_lsx_vsat_hu ((v8u16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: UV4SI, UV4SI, UQI. */
+#define __lsx_vsat_wu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vsat_wu ((v4u32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: UV2DI, UV2DI, UQI. */
+#define __lsx_vsat_du(/*__m128i*/ _1, /*ui6*/ _2) \
+ ((__m128i)__builtin_lsx_vsat_du ((v2u64)(_1), (_2)))
+
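+/* Usage sketch for the vsat family above (illustrative; variable names are
+   hypothetical).  As commonly documented for LSX, the immediate selects the
+   target width: the signed forms clamp each lane to the range of an
+   (imm + 1)-bit signed value, the unsigned forms to an (imm + 1)-bit
+   unsigned value:
+
+     __m128i v = __lsx_vreplgr2vr_h (1000);
+     __m128i s = __lsx_vsat_h (v, 7);      // clamp to [-128, 127] -> 127
+*/
+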
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vadda_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vadda_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vadda_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vadda_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vadda_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vadda_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vadda_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vadda_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsadd_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsadd_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsadd_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsadd_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsadd_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsadd_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsadd_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsadd_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsadd_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsadd_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsadd_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsadd_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsadd_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsadd_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsadd_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsadd_du ((v2u64)_1, (v2u64)_2);
+}
+
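+/* Usage sketch for the saturating adds above (illustrative; variable names
+   are hypothetical).  Unlike __lsx_vadd_b, the result does not wrap:
+
+     __m128i a = __lsx_vreplgr2vr_b (120);
+     __m128i b = __lsx_vreplgr2vr_b (100);
+     __m128i s = __lsx_vsadd_b (a, b);     // each byte saturates to 127
+*/
+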
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavg_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vavg_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavg_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vavg_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavg_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vavg_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavg_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vavg_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavg_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vavg_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavg_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vavg_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavg_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vavg_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavg_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vavg_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavgr_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vavgr_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavgr_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vavgr_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavgr_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vavgr_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavgr_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vavgr_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavgr_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vavgr_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavgr_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vavgr_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavgr_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vavgr_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vavgr_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vavgr_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssub_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssub_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssub_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssub_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssub_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssub_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssub_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssub_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssub_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssub_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssub_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssub_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssub_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssub_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssub_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssub_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vabsd_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vabsd_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vabsd_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vabsd_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vabsd_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vabsd_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vabsd_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vabsd_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vabsd_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vabsd_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vabsd_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vabsd_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vabsd_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vabsd_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vabsd_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vabsd_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmul_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmul_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmul_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmul_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmul_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmul_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmul_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmul_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmadd_b (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmadd_b ((v16i8)_1, (v16i8)_2, (v16i8)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmadd_h (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmadd_h ((v8i16)_1, (v8i16)_2, (v8i16)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmadd_w (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmadd_w ((v4i32)_1, (v4i32)_2, (v4i32)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmadd_d (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmadd_d ((v2i64)_1, (v2i64)_2, (v2i64)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmsub_b (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmsub_b ((v16i8)_1, (v16i8)_2, (v16i8)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmsub_h (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmsub_h ((v8i16)_1, (v8i16)_2, (v8i16)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmsub_w (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmsub_w ((v4i32)_1, (v4i32)_2, (v4i32)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmsub_d (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmsub_d ((v2i64)_1, (v2i64)_2, (v2i64)_3);
+}
+
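+/* Usage sketch for vmadd/vmsub above (illustrative; variable names are
+   hypothetical).  The first operand is the accumulator: vmadd returns
+   _1 + _2 * _3 and vmsub returns _1 - _2 * _3, element-wise:
+
+     __m128i acc = __lsx_vreplgr2vr_w (10);
+     __m128i a   = __lsx_vreplgr2vr_w (3);
+     __m128i b   = __lsx_vreplgr2vr_w (4);
+     acc = __lsx_vmadd_w (acc, a, b);      // each word: 10 + 3 * 4 = 22
+*/
+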
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vdiv_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vdiv_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vdiv_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vdiv_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vdiv_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vdiv_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vdiv_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vdiv_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vdiv_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vdiv_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vdiv_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vdiv_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vdiv_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vdiv_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vdiv_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vdiv_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhaddw_h_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vhaddw_h_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhaddw_w_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vhaddw_w_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhaddw_d_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vhaddw_d_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhaddw_hu_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vhaddw_hu_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhaddw_wu_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vhaddw_wu_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhaddw_du_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vhaddw_du_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhsubw_h_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vhsubw_h_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhsubw_w_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vhsubw_w_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhsubw_d_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vhsubw_d_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhsubw_hu_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vhsubw_hu_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhsubw_wu_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vhsubw_wu_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhsubw_du_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vhsubw_du_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmod_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmod_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmod_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmod_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmod_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmod_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmod_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmod_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmod_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmod_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmod_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmod_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmod_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmod_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmod_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmod_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, rk. */
+/* Data types in instruction templates: V16QI, V16QI, SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vreplve_b (__m128i _1, int _2)
+{
+ return (__m128i)__builtin_lsx_vreplve_b ((v16i8)_1, (int)_2);
+}
+
+/* Assembly instruction format: vd, vj, rk. */
+/* Data types in instruction templates: V8HI, V8HI, SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vreplve_h (__m128i _1, int _2)
+{
+ return (__m128i)__builtin_lsx_vreplve_h ((v8i16)_1, (int)_2);
+}
+
+/* Assembly instruction format: vd, vj, rk. */
+/* Data types in instruction templates: V4SI, V4SI, SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vreplve_w (__m128i _1, int _2)
+{
+ return (__m128i)__builtin_lsx_vreplve_w ((v4i32)_1, (int)_2);
+}
+
+/* Assembly instruction format: vd, vj, rk. */
+/* Data types in instruction templates: V2DI, V2DI, SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vreplve_d (__m128i _1, int _2)
+{
+ return (__m128i)__builtin_lsx_vreplve_d ((v2i64)_1, (int)_2);
+}
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: V16QI, V16QI, UQI. */
+#define __lsx_vreplvei_b(/*__m128i*/ _1, /*ui4*/ _2) \
+ ((__m128i)__builtin_lsx_vreplvei_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui3. */
+/* Data types in instruction templates: V8HI, V8HI, UQI. */
+#define __lsx_vreplvei_h(/*__m128i*/ _1, /*ui3*/ _2) \
+ ((__m128i)__builtin_lsx_vreplvei_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui2. */
+/* Data types in instruction templates: V4SI, V4SI, UQI. */
+#define __lsx_vreplvei_w(/*__m128i*/ _1, /*ui2*/ _2) \
+ ((__m128i)__builtin_lsx_vreplvei_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui1. */
+/* Data types in instruction templates: V2DI, V2DI, UQI. */
+#define __lsx_vreplvei_d(/*__m128i*/ _1, /*ui1*/ _2) \
+ ((__m128i)__builtin_lsx_vreplvei_d ((v2i64)(_1), (_2)))
+
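+/* Usage sketch for vreplve/vreplvei above (illustrative; variable names are
+   hypothetical).  vreplvei broadcasts the lane selected by the immediate,
+   while vreplve takes the lane index from a general register:
+
+     __m128i v = __lsx_vinsgr2vr_w (__lsx_vreplgr2vr_w (0), 42, 3);
+     __m128i r = __lsx_vreplvei_w (v, 3);  // every word becomes 42
+*/
+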
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpickev_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vpickev_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpickev_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vpickev_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpickev_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vpickev_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpickev_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vpickev_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpickod_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vpickod_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpickod_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vpickod_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpickod_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vpickod_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpickod_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vpickod_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vilvh_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vilvh_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vilvh_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vilvh_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vilvh_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vilvh_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vilvh_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vilvh_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vilvl_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vilvl_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vilvl_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vilvl_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vilvl_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vilvl_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vilvl_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vilvl_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpackev_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vpackev_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpackev_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vpackev_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpackev_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vpackev_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpackev_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vpackev_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpackod_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vpackod_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpackod_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vpackod_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpackod_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vpackod_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpackod_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vpackod_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vshuf_h (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vshuf_h ((v8i16)_1, (v8i16)_2, (v8i16)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vshuf_w (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vshuf_w ((v4i32)_1, (v4i32)_2, (v4i32)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vshuf_d (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vshuf_d ((v2i64)_1, (v2i64)_2, (v2i64)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vand_v (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vand_v ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, ui8. */
+/* Data types in instruction templates: UV16QI, UV16QI, UQI. */
+#define __lsx_vandi_b(/*__m128i*/ _1, /*ui8*/ _2) \
+ ((__m128i)__builtin_lsx_vandi_b ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vor_v (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vor_v ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, ui8. */
+/* Data types in instruction templates: UV16QI, UV16QI, UQI. */
+#define __lsx_vori_b(/*__m128i*/ _1, /*ui8*/ _2) \
+ ((__m128i)__builtin_lsx_vori_b ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vnor_v (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vnor_v ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, ui8. */
+/* Data types in instruction templates: UV16QI, UV16QI, UQI. */
+#define __lsx_vnori_b(/*__m128i*/ _1, /*ui8*/ _2) \
+ ((__m128i)__builtin_lsx_vnori_b ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vxor_v (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vxor_v ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, ui8. */
+/* Data types in instruction templates: UV16QI, UV16QI, UQI. */
+#define __lsx_vxori_b(/*__m128i*/ _1, /*ui8*/ _2) \
+ ((__m128i)__builtin_lsx_vxori_b ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, vk, va. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vbitsel_v (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vbitsel_v ((v16u8)_1, (v16u8)_2, (v16u8)_3);
+}
+
+/* Assembly instruction format: vd, vj, ui8. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI, USI. */
+#define __lsx_vbitseli_b(/*__m128i*/ _1, /*__m128i*/ _2, /*ui8*/ _3) \
+ ((__m128i)__builtin_lsx_vbitseli_b ((v16u8)(_1), (v16u8)(_2), (_3)))
+
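+/* Usage sketch for vbitsel_v above (illustrative; variable names are
+   hypothetical).  Bit-wise select: where the mask _3 has 1-bits the result
+   takes bits from _2, elsewhere from _1:
+
+     __m128i lo   = __lsx_vreplgr2vr_b (0x00);
+     __m128i hi   = __lsx_vreplgr2vr_b (0xff);
+     __m128i mask = __lsx_vreplgr2vr_b (0x0f);
+     __m128i r    = __lsx_vbitsel_v (lo, hi, mask);  // each byte -> 0x0f
+*/
+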
+/* Assembly instruction format: vd, vj, ui8. */
+/* Data types in instruction templates: V16QI, V16QI, USI. */
+#define __lsx_vshuf4i_b(/*__m128i*/ _1, /*ui8*/ _2) \
+ ((__m128i)__builtin_lsx_vshuf4i_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui8. */
+/* Data types in instruction templates: V8HI, V8HI, USI. */
+#define __lsx_vshuf4i_h(/*__m128i*/ _1, /*ui8*/ _2) \
+ ((__m128i)__builtin_lsx_vshuf4i_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui8. */
+/* Data types in instruction templates: V4SI, V4SI, USI. */
+#define __lsx_vshuf4i_w(/*__m128i*/ _1, /*ui8*/ _2) \
+ ((__m128i)__builtin_lsx_vshuf4i_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format: vd, rj. */
+/* Data types in instruction templates: V16QI, SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vreplgr2vr_b (int _1)
+{
+ return (__m128i)__builtin_lsx_vreplgr2vr_b ((int)_1);
+}
+
+/* Assembly instruction format: vd, rj. */
+/* Data types in instruction templates: V8HI, SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vreplgr2vr_h (int _1)
+{
+ return (__m128i)__builtin_lsx_vreplgr2vr_h ((int)_1);
+}
+
+/* Assembly instruction format: vd, rj. */
+/* Data types in instruction templates: V4SI, SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vreplgr2vr_w (int _1)
+{
+ return (__m128i)__builtin_lsx_vreplgr2vr_w ((int)_1);
+}
+
+/* Assembly instruction format: vd, rj. */
+/* Data types in instruction templates: V2DI, DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vreplgr2vr_d (long int _1)
+{
+ return (__m128i)__builtin_lsx_vreplgr2vr_d ((long int)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpcnt_b (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vpcnt_b ((v16i8)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpcnt_h (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vpcnt_h ((v8i16)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpcnt_w (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vpcnt_w ((v4i32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vpcnt_d (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vpcnt_d ((v2i64)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vclo_b (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vclo_b ((v16i8)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vclo_h (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vclo_h ((v8i16)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vclo_w (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vclo_w ((v4i32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vclo_d (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vclo_d ((v2i64)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vclz_b (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vclz_b ((v16i8)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vclz_h (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vclz_h ((v8i16)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vclz_w (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vclz_w ((v4i32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vclz_d (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vclz_d ((v2i64)_1);
+}
+
+/* Assembly instruction format: rd, vj, ui4. */
+/* Data types in instruction templates: SI, V16QI, UQI. */
+#define __lsx_vpickve2gr_b(/*__m128i*/ _1, /*ui4*/ _2) \
+ ((int)__builtin_lsx_vpickve2gr_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: rd, vj, ui3. */
+/* Data types in instruction templates: SI, V8HI, UQI. */
+#define __lsx_vpickve2gr_h(/*__m128i*/ _1, /*ui3*/ _2) \
+ ((int)__builtin_lsx_vpickve2gr_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format: rd, vj, ui2. */
+/* Data types in instruction templates: SI, V4SI, UQI. */
+#define __lsx_vpickve2gr_w(/*__m128i*/ _1, /*ui2*/ _2) \
+ ((int)__builtin_lsx_vpickve2gr_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format: rd, vj, ui1. */
+/* Data types in instruction templates: DI, V2DI, UQI. */
+#define __lsx_vpickve2gr_d(/*__m128i*/ _1, /*ui1*/ _2) \
+ ((long int)__builtin_lsx_vpickve2gr_d ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format: rd, vj, ui4. */
+/* Data types in instruction templates: USI, V16QI, UQI. */
+#define __lsx_vpickve2gr_bu(/*__m128i*/ _1, /*ui4*/ _2) \
+ ((unsigned int)__builtin_lsx_vpickve2gr_bu ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: rd, vj, ui3. */
+/* Data types in instruction templates: USI, V8HI, UQI. */
+#define __lsx_vpickve2gr_hu(/*__m128i*/ _1, /*ui3*/ _2) \
+ ((unsigned int)__builtin_lsx_vpickve2gr_hu ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format: rd, vj, ui2. */
+/* Data types in instruction templates: USI, V4SI, UQI. */
+#define __lsx_vpickve2gr_wu(/*__m128i*/ _1, /*ui2*/ _2) \
+ ((unsigned int)__builtin_lsx_vpickve2gr_wu ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format: rd, vj, ui1. */
+/* Data types in instruction templates: UDI, V2DI, UQI. */
+#define __lsx_vpickve2gr_du(/*__m128i*/ _1, /*ui1*/ _2) \
+ ((unsigned long int)__builtin_lsx_vpickve2gr_du ((v2i64)(_1), (_2)))
+
+/* Assembly instruction format: vd, rj, ui4. */
+/* Data types in instruction templates: V16QI, V16QI, SI, UQI. */
+#define __lsx_vinsgr2vr_b(/*__m128i*/ _1, /*int*/ _2, /*ui4*/ _3) \
+ ((__m128i)__builtin_lsx_vinsgr2vr_b ((v16i8)(_1), (int)(_2), (_3)))
+
+/* Assembly instruction format: vd, rj, ui3. */
+/* Data types in instruction templates: V8HI, V8HI, SI, UQI. */
+#define __lsx_vinsgr2vr_h(/*__m128i*/ _1, /*int*/ _2, /*ui3*/ _3) \
+ ((__m128i)__builtin_lsx_vinsgr2vr_h ((v8i16)(_1), (int)(_2), (_3)))
+
+/* Assembly instruction format: vd, rj, ui2. */
+/* Data types in instruction templates: V4SI, V4SI, SI, UQI. */
+#define __lsx_vinsgr2vr_w(/*__m128i*/ _1, /*int*/ _2, /*ui2*/ _3) \
+ ((__m128i)__builtin_lsx_vinsgr2vr_w ((v4i32)(_1), (int)(_2), (_3)))
+
+/* Assembly instruction format: vd, rj, ui1. */
+/* Data types in instruction templates: V2DI, V2DI, DI, UQI. */
+#define __lsx_vinsgr2vr_d(/*__m128i*/ _1, /*long int*/ _2, /*ui1*/ _3) \
+ ((__m128i)__builtin_lsx_vinsgr2vr_d ((v2i64)(_1), (long int)(_2), (_3)))
+
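+/* Usage sketch for vpickve2gr/vinsgr2vr above (illustrative; variable names
+   are hypothetical).  vpickve2gr extracts one lane into a general register
+   (the signed forms sign-extend, the _u forms zero-extend); vinsgr2vr
+   returns the vector with one lane replaced:
+
+     __m128i v  = __lsx_vreplgr2vr_h (-7);
+     int e      = __lsx_vpickve2gr_h  (v, 2);   // sign-extended:  -7
+     unsigned u = __lsx_vpickve2gr_hu (v, 2);   // zero-extended:  0xfff9
+     v = __lsx_vinsgr2vr_h (v, 1, 0);           // lane 0 becomes 1
+*/
+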
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SF, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfadd_s (__m128 _1, __m128 _2)
+{
+ return (__m128)__builtin_lsx_vfadd_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DF, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfadd_d (__m128d _1, __m128d _2)
+{
+ return (__m128d)__builtin_lsx_vfadd_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SF, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfsub_s (__m128 _1, __m128 _2)
+{
+ return (__m128)__builtin_lsx_vfsub_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DF, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfsub_d (__m128d _1, __m128d _2)
+{
+ return (__m128d)__builtin_lsx_vfsub_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SF, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfmul_s (__m128 _1, __m128 _2)
+{
+ return (__m128)__builtin_lsx_vfmul_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DF, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfmul_d (__m128d _1, __m128d _2)
+{
+ return (__m128d)__builtin_lsx_vfmul_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SF, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfdiv_s (__m128 _1, __m128 _2)
+{
+ return (__m128)__builtin_lsx_vfdiv_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DF, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfdiv_d (__m128d _1, __m128d _2)
+{
+ return (__m128d)__builtin_lsx_vfdiv_d ((v2f64)_1, (v2f64)_2);
+}
+
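+/* Usage sketch for the single/double float arithmetic above (illustrative;
+   variable names are hypothetical, and the brace initializers assume that
+   __m128 and __m128d are GNU C vector types, as this header normally
+   defines them):
+
+     __m128 a = { 1.0f, 2.0f, 3.0f, 4.0f };
+     __m128 b = { 0.5f, 0.5f, 0.5f, 0.5f };
+     __m128 q = __lsx_vfdiv_s (a, b);           // { 2.0f, 4.0f, 6.0f, 8.0f }
+*/
+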
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcvt_h_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcvt_h_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SF, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfcvt_s_d (__m128d _1, __m128d _2)
+{
+ return (__m128)__builtin_lsx_vfcvt_s_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SF, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfmin_s (__m128 _1, __m128 _2)
+{
+ return (__m128)__builtin_lsx_vfmin_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DF, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfmin_d (__m128d _1, __m128d _2)
+{
+ return (__m128d)__builtin_lsx_vfmin_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SF, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfmina_s (__m128 _1, __m128 _2)
+{
+ return (__m128)__builtin_lsx_vfmina_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DF, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfmina_d (__m128d _1, __m128d _2)
+{
+ return (__m128d)__builtin_lsx_vfmina_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SF, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfmax_s (__m128 _1, __m128 _2)
+{
+ return (__m128)__builtin_lsx_vfmax_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DF, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfmax_d (__m128d _1, __m128d _2)
+{
+ return (__m128d)__builtin_lsx_vfmax_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SF, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfmaxa_s (__m128 _1, __m128 _2)
+{
+ return (__m128)__builtin_lsx_vfmaxa_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DF, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfmaxa_d (__m128d _1, __m128d _2)
+{
+ return (__m128d)__builtin_lsx_vfmaxa_d ((v2f64)_1, (v2f64)_2);
+}
+
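+/* Illustrative usage sketch (not part of the upstream header): vfmin/vfmax pick
+   the numerically smaller/larger lane, while vfmina/vfmaxa are understood to
+   pick the lane with the smaller/larger magnitude.  The helper below clamps
+   every lane of _x into [_lo, _hi]; its name is a placeholder.  */
+static __inline __m128d
+__lsx_example_clamp_d (__m128d _x, __m128d _lo, __m128d _hi)
+{
+ return __lsx_vfmin_d (__lsx_vfmax_d (_x, _lo), _hi);
+}
+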
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfclass_s (__m128 _1)
+{
+ return (__m128i)__builtin_lsx_vfclass_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfclass_d (__m128d _1)
+{
+ return (__m128i)__builtin_lsx_vfclass_d ((v2f64)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfsqrt_s (__m128 _1)
+{
+ return (__m128)__builtin_lsx_vfsqrt_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfsqrt_d (__m128d _1)
+{
+ return (__m128d)__builtin_lsx_vfsqrt_d ((v2f64)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfrecip_s (__m128 _1)
+{
+ return (__m128)__builtin_lsx_vfrecip_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfrecip_d (__m128d _1)
+{
+ return (__m128d)__builtin_lsx_vfrecip_d ((v2f64)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfrint_s (__m128 _1)
+{
+ return (__m128)__builtin_lsx_vfrint_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfrint_d (__m128d _1)
+{
+ return (__m128d)__builtin_lsx_vfrint_d ((v2f64)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfrsqrt_s (__m128 _1)
+{
+ return (__m128)__builtin_lsx_vfrsqrt_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfrsqrt_d (__m128d _1)
+{
+ return (__m128d)__builtin_lsx_vfrsqrt_d ((v2f64)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vflogb_s (__m128 _1)
+{
+ return (__m128)__builtin_lsx_vflogb_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vflogb_d (__m128d _1)
+{
+ return (__m128d)__builtin_lsx_vflogb_d ((v2f64)_1);
+}
+
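+/* Illustrative usage sketch (not part of the upstream header): the helper below
+   scales every lane of _v by the reciprocal square root of the matching lane
+   of _len2, i.e. _v / sqrt (_len2).  Names are placeholders.  */
+static __inline __m128
+__lsx_example_scale_by_rsqrt_s (__m128 _v, __m128 _len2)
+{
+ return __lsx_vfmul_s (_v, __lsx_vfrsqrt_s (_len2));
+}
+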
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SF, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfcvth_s_h (__m128i _1)
+{
+ return (__m128)__builtin_lsx_vfcvth_s_h ((v8i16)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfcvth_d_s (__m128 _1)
+{
+ return (__m128d)__builtin_lsx_vfcvth_d_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SF, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfcvtl_s_h (__m128i _1)
+{
+ return (__m128)__builtin_lsx_vfcvtl_s_h ((v8i16)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfcvtl_d_s (__m128 _1)
+{
+ return (__m128d)__builtin_lsx_vfcvtl_d_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftint_w_s (__m128 _1)
+{
+ return (__m128i)__builtin_lsx_vftint_w_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftint_l_d (__m128d _1)
+{
+ return (__m128i)__builtin_lsx_vftint_l_d ((v2f64)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: UV4SI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftint_wu_s (__m128 _1)
+{
+ return (__m128i)__builtin_lsx_vftint_wu_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: UV2DI, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftint_lu_d (__m128d _1)
+{
+ return (__m128i)__builtin_lsx_vftint_lu_d ((v2f64)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrz_w_s (__m128 _1)
+{
+ return (__m128i)__builtin_lsx_vftintrz_w_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrz_l_d (__m128d _1)
+{
+ return (__m128i)__builtin_lsx_vftintrz_l_d ((v2f64)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: UV4SI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrz_wu_s (__m128 _1)
+{
+ return (__m128i)__builtin_lsx_vftintrz_wu_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: UV2DI, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrz_lu_d (__m128d _1)
+{
+ return (__m128i)__builtin_lsx_vftintrz_lu_d ((v2f64)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SF, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vffint_s_w (__m128i _1)
+{
+ return (__m128)__builtin_lsx_vffint_s_w ((v4i32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DF, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vffint_d_l (__m128i _1)
+{
+ return (__m128d)__builtin_lsx_vffint_d_l ((v2i64)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SF, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vffint_s_wu (__m128i _1)
+{
+ return (__m128)__builtin_lsx_vffint_s_wu ((v4u32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DF, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vffint_d_lu (__m128i _1)
+{
+ return (__m128d)__builtin_lsx_vffint_d_lu ((v2u64)_1);
+}
+
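+/* Illustrative usage sketch (not part of the upstream header): a truncating
+   float -> int -> float round trip, which discards the fractional part of
+   every lane.  The helper name is a placeholder.  */
+static __inline __m128
+__lsx_example_trunc_s (__m128 _1)
+{
+ return __lsx_vffint_s_w (__lsx_vftintrz_w_s (_1));
+}
+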
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vandn_v (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vandn_v ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vneg_b (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vneg_b ((v16i8)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vneg_h (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vneg_h ((v8i16)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vneg_w (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vneg_w ((v4i32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vneg_d (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vneg_d ((v2i64)_1);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmuh_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmuh_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmuh_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmuh_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmuh_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmuh_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmuh_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmuh_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmuh_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmuh_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmuh_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmuh_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmuh_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmuh_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmuh_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmuh_du ((v2u64)_1, (v2u64)_2);
+}
+
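+/* Illustrative usage sketch (not part of the upstream header): vmuh keeps only
+   the high half of each double-width product, the usual building block for
+   fixed-point scaling.  The helper below returns the upper 32 bits of each
+   signed 32 x 32 product; its name is a placeholder.  */
+static __inline __m128i
+__lsx_example_mulhi_w (__m128i _1, __m128i _2)
+{
+ return __lsx_vmuh_w (_1, _2);
+}
+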
+/* Assembly instruction format: vd, vj, ui3. */
+/* Data types in instruction templates: V8HI, V16QI, UQI. */
+#define __lsx_vsllwil_h_b(/*__m128i*/ _1, /*ui3*/ _2) \
+ ((__m128i)__builtin_lsx_vsllwil_h_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: V4SI, V8HI, UQI. */
+#define __lsx_vsllwil_w_h(/*__m128i*/ _1, /*ui4*/ _2) \
+ ((__m128i)__builtin_lsx_vsllwil_w_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V2DI, V4SI, UQI. */
+#define __lsx_vsllwil_d_w(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vsllwil_d_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui3. */
+/* Data types in instruction templates: UV8HI, UV16QI, UQI. */
+#define __lsx_vsllwil_hu_bu(/*__m128i*/ _1, /*ui3*/ _2) \
+ ((__m128i)__builtin_lsx_vsllwil_hu_bu ((v16u8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: UV4SI, UV8HI, UQI. */
+#define __lsx_vsllwil_wu_hu(/*__m128i*/ _1, /*ui4*/ _2) \
+ ((__m128i)__builtin_lsx_vsllwil_wu_hu ((v8u16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: UV2DI, UV4SI, UQI. */
+#define __lsx_vsllwil_du_wu(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vsllwil_du_wu ((v4u32)(_1), (_2)))
+
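+/* Illustrative usage sketch (not part of the upstream header): with a shift
+   count of 0, vsllwil simply widens the low-half elements, so it doubles as a
+   sign/zero extension.  The helper below zero-extends the low eight unsigned
+   bytes of _1 to halfwords; its name is a placeholder.  */
+static __inline __m128i
+__lsx_example_zext_low_bu (__m128i _1)
+{
+ return __lsx_vsllwil_hu_bu (_1, 0);
+}
+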
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsran_b_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsran_b_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsran_h_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsran_h_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsran_w_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsran_w_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssran_b_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssran_b_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssran_h_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssran_h_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssran_w_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssran_w_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssran_bu_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssran_bu_h ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssran_hu_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssran_hu_w ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssran_wu_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssran_wu_d ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrarn_b_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrarn_b_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrarn_h_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrarn_h_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrarn_w_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrarn_w_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrarn_b_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssrarn_b_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrarn_h_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssrarn_h_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrarn_w_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssrarn_w_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrarn_bu_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssrarn_bu_h ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrarn_hu_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssrarn_hu_w ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrarn_wu_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssrarn_wu_d ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrln_b_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrln_b_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrln_h_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrln_h_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrln_w_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrln_w_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrln_bu_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssrln_bu_h ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrln_hu_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssrln_hu_w ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrln_wu_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssrln_wu_d ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrlrn_b_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrlrn_b_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrlrn_h_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrlrn_h_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsrlrn_w_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsrlrn_w_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV16QI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrlrn_bu_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssrlrn_bu_h ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrlrn_hu_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssrlrn_hu_w ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrlrn_wu_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssrlrn_wu_d ((v2u64)_1, (v2u64)_2);
+}
+
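+/* Illustrative usage sketch (not part of the upstream header): the ssrln family
+   shifts right, saturates and narrows in one step.  The helper below is a
+   fixed-point requantization sketch: each 16-bit lane of _1 is shifted right
+   by the matching lane of _shift, saturated to an unsigned byte, and the
+   narrowed results are understood to land in the low half of the destination.
+   Names are placeholders.  */
+static __inline __m128i
+__lsx_example_requant_bu_h (__m128i _1, __m128i _shift)
+{
+ return __lsx_vssrln_bu_h (_1, _shift);
+}
+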
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI, UQI. */
+#define __lsx_vfrstpi_b(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+ ((__m128i)__builtin_lsx_vfrstpi_b ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI, UQI. */
+#define __lsx_vfrstpi_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+ ((__m128i)__builtin_lsx_vfrstpi_h ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfrstp_b (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vfrstp_b ((v16i8)_1, (v16i8)_2, (v16i8)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfrstp_h (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vfrstp_h ((v8i16)_1, (v8i16)_2, (v8i16)_3);
+}
+
+/* Assembly instruction format: vd, vj, ui8. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI, USI. */
+#define __lsx_vshuf4i_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui8*/ _3) \
+ ((__m128i)__builtin_lsx_vshuf4i_d ((v2i64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V16QI, V16QI, UQI. */
+#define __lsx_vbsrl_v(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vbsrl_v ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V16QI, V16QI, UQI. */
+#define __lsx_vbsll_v(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vbsll_v ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui8. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI, USI. */
+#define __lsx_vextrins_b(/*__m128i*/ _1, /*__m128i*/ _2, /*ui8*/ _3) \
+ ((__m128i)__builtin_lsx_vextrins_b ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui8. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI, USI. */
+#define __lsx_vextrins_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui8*/ _3) \
+ ((__m128i)__builtin_lsx_vextrins_h ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui8. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI, USI. */
+#define __lsx_vextrins_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui8*/ _3) \
+ ((__m128i)__builtin_lsx_vextrins_w ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui8. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI, USI. */
+#define __lsx_vextrins_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui8*/ _3) \
+ ((__m128i)__builtin_lsx_vextrins_d ((v2i64)(_1), (v2i64)(_2), (_3)))
+
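+/* Illustrative usage sketch (not part of the upstream header): vbsll/vbsrl move
+   the whole 128-bit register left/right by a constant number of bytes.  The
+   helper below discards the low four bytes of _1; its name is a placeholder.  */
+static __inline __m128i
+__lsx_example_skip4_v (__m128i _1)
+{
+ return __lsx_vbsrl_v (_1, 4);
+}
+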
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmskltz_b (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vmskltz_b ((v16i8)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmskltz_h (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vmskltz_h ((v8i16)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmskltz_w (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vmskltz_w ((v4i32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmskltz_d (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vmskltz_d ((v2i64)_1);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsigncov_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsigncov_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsigncov_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsigncov_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsigncov_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsigncov_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsigncov_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsigncov_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk, va. */
+/* Data types in instruction templates: V4SF, V4SF, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfmadd_s (__m128 _1, __m128 _2, __m128 _3)
+{
+ return (__m128)__builtin_lsx_vfmadd_s ((v4f32)_1, (v4f32)_2, (v4f32)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk, va. */
+/* Data types in instruction templates: V2DF, V2DF, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfmadd_d (__m128d _1, __m128d _2, __m128d _3)
+{
+ return (__m128d)__builtin_lsx_vfmadd_d ((v2f64)_1, (v2f64)_2, (v2f64)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk, va. */
+/* Data types in instruction templates: V4SF, V4SF, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfmsub_s (__m128 _1, __m128 _2, __m128 _3)
+{
+ return (__m128)__builtin_lsx_vfmsub_s ((v4f32)_1, (v4f32)_2, (v4f32)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk, va. */
+/* Data types in instruction templates: V2DF, V2DF, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfmsub_d (__m128d _1, __m128d _2, __m128d _3)
+{
+ return (__m128d)__builtin_lsx_vfmsub_d ((v2f64)_1, (v2f64)_2, (v2f64)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk, va. */
+/* Data types in instruction templates: V4SF, V4SF, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfnmadd_s (__m128 _1, __m128 _2, __m128 _3)
+{
+ return (__m128)__builtin_lsx_vfnmadd_s ((v4f32)_1, (v4f32)_2, (v4f32)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk, va. */
+/* Data types in instruction templates: V2DF, V2DF, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfnmadd_d (__m128d _1, __m128d _2, __m128d _3)
+{
+ return (__m128d)__builtin_lsx_vfnmadd_d ((v2f64)_1, (v2f64)_2, (v2f64)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk, va. */
+/* Data types in instruction templates: V4SF, V4SF, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfnmsub_s (__m128 _1, __m128 _2, __m128 _3)
+{
+ return (__m128)__builtin_lsx_vfnmsub_s ((v4f32)_1, (v4f32)_2, (v4f32)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk, va. */
+/* Data types in instruction templates: V2DF, V2DF, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfnmsub_d (__m128d _1, __m128d _2, __m128d _3)
+{
+ return (__m128d)__builtin_lsx_vfnmsub_d ((v2f64)_1, (v2f64)_2, (v2f64)_3);
+}
+
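+/* Illustrative usage sketch (not part of the upstream header): vfmadd evaluates
+   _1 * _2 + _3 as a single fused operation per lane, so the axpy-style helper
+   below needs one instruction instead of a separate multiply and add.  The
+   helper name is a placeholder.  */
+static __inline __m128
+__lsx_example_axpy_s (__m128 _a, __m128 _x, __m128 _y)
+{
+ return __lsx_vfmadd_s (_a, _x, _y);
+}
+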
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrne_w_s (__m128 _1)
+{
+ return (__m128i)__builtin_lsx_vftintrne_w_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrne_l_d (__m128d _1)
+{
+ return (__m128i)__builtin_lsx_vftintrne_l_d ((v2f64)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrp_w_s (__m128 _1)
+{
+ return (__m128i)__builtin_lsx_vftintrp_w_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrp_l_d (__m128d _1)
+{
+ return (__m128i)__builtin_lsx_vftintrp_l_d ((v2f64)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrm_w_s (__m128 _1)
+{
+ return (__m128i)__builtin_lsx_vftintrm_w_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrm_l_d (__m128d _1)
+{
+ return (__m128i)__builtin_lsx_vftintrm_l_d ((v2f64)_1);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftint_w_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vftint_w_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SF, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vffint_s_l (__m128i _1, __m128i _2)
+{
+ return (__m128)__builtin_lsx_vffint_s_l ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrz_w_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vftintrz_w_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrp_w_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vftintrp_w_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrm_w_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vftintrm_w_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrne_w_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vftintrne_w_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintl_l_s (__m128 _1)
+{
+ return (__m128i)__builtin_lsx_vftintl_l_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftinth_l_s (__m128 _1)
+{
+ return (__m128i)__builtin_lsx_vftinth_l_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DF, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vffinth_d_w (__m128i _1)
+{
+ return (__m128d)__builtin_lsx_vffinth_d_w ((v4i32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DF, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vffintl_d_w (__m128i _1)
+{
+ return (__m128d)__builtin_lsx_vffintl_d_w ((v4i32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrzl_l_s (__m128 _1)
+{
+ return (__m128i)__builtin_lsx_vftintrzl_l_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrzh_l_s (__m128 _1)
+{
+ return (__m128i)__builtin_lsx_vftintrzh_l_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrpl_l_s (__m128 _1)
+{
+ return (__m128i)__builtin_lsx_vftintrpl_l_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrph_l_s (__m128 _1)
+{
+ return (__m128i)__builtin_lsx_vftintrph_l_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrml_l_s (__m128 _1)
+{
+ return (__m128i)__builtin_lsx_vftintrml_l_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrmh_l_s (__m128 _1)
+{
+ return (__m128i)__builtin_lsx_vftintrmh_l_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrnel_l_s (__m128 _1)
+{
+ return (__m128i)__builtin_lsx_vftintrnel_l_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vftintrneh_l_s (__m128 _1)
+{
+ return (__m128i)__builtin_lsx_vftintrneh_l_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfrintrne_s (__m128 _1)
+{
+ return (__m128)__builtin_lsx_vfrintrne_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfrintrne_d (__m128d _1)
+{
+ return (__m128d)__builtin_lsx_vfrintrne_d ((v2f64)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfrintrz_s (__m128 _1)
+{
+ return (__m128)__builtin_lsx_vfrintrz_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfrintrz_d (__m128d _1)
+{
+ return (__m128d)__builtin_lsx_vfrintrz_d ((v2f64)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfrintrp_s (__m128 _1)
+{
+ return (__m128)__builtin_lsx_vfrintrp_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfrintrp_d (__m128d _1)
+{
+ return (__m128d)__builtin_lsx_vfrintrp_d ((v2f64)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SI, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128 __lsx_vfrintrm_s (__m128 _1)
+{
+ return (__m128)__builtin_lsx_vfrintrm_s ((v4f32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128d __lsx_vfrintrm_d (__m128d _1)
+{
+ return (__m128d)__builtin_lsx_vfrintrm_d ((v2f64)_1);
+}
+
+/* Assembly instruction format: vd, rj, si8, idx. */
+/* Data types in instruction templates: VOID, V16QI, CVPOINTER, SI, UQI. */
+#define __lsx_vstelm_b(/*__m128i*/ _1, /*void **/ _2, /*si8*/ _3, /*idx*/ _4) \
+ ((void)__builtin_lsx_vstelm_b ((v16i8)(_1), (void *)(_2), (_3), (_4)))
+
+/* Assembly instruction format: vd, rj, si8, idx. */
+/* Data types in instruction templates: VOID, V8HI, CVPOINTER, SI, UQI. */
+#define __lsx_vstelm_h(/*__m128i*/ _1, /*void **/ _2, /*si8*/ _3, /*idx*/ _4) \
+ ((void)__builtin_lsx_vstelm_h ((v8i16)(_1), (void *)(_2), (_3), (_4)))
+
+/* Assembly instruction format: vd, rj, si8, idx. */
+/* Data types in instruction templates: VOID, V4SI, CVPOINTER, SI, UQI. */
+#define __lsx_vstelm_w(/*__m128i*/ _1, /*void **/ _2, /*si8*/ _3, /*idx*/ _4) \
+ ((void)__builtin_lsx_vstelm_w ((v4i32)(_1), (void *)(_2), (_3), (_4)))
+
+/* Assembly instruction format: vd, rj, si8, idx. */
+/* Data types in instruction templates: VOID, V2DI, CVPOINTER, SI, UQI. */
+#define __lsx_vstelm_d(/*__m128i*/ _1, /*void **/ _2, /*si8*/ _3, /*idx*/ _4) \
+ ((void)__builtin_lsx_vstelm_d ((v2i64)(_1), (void *)(_2), (_3), (_4)))
+
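+/* Illustrative usage sketch (not part of the upstream header): vstelm stores a
+   single selected element, so writing one lane to memory does not need a full
+   128-bit store.  Both the offset and the lane index must be compile-time
+   constants; the helper name is a placeholder.  */
+static __inline void
+__lsx_example_store_lane0_w (__m128i _1, void *_2)
+{
+ /* Store 32-bit lane 0 of _1 to _2 with offset 0.  */
+ __lsx_vstelm_w (_1, _2, 0, 0);
+}
+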
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_d_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwev_d_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_w_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwev_w_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_h_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwev_h_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_d_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwod_d_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_w_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwod_w_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_h_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwod_h_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_d_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwev_d_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_w_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwev_w_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_h_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwev_h_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_d_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwod_d_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_w_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwod_w_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_h_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwod_h_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_d_wu_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwev_d_wu_w ((v4u32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, UV8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_w_hu_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwev_w_hu_h ((v8u16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, UV16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_h_bu_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwev_h_bu_b ((v16u8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_d_wu_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwod_d_wu_w ((v4u32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, UV8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_w_hu_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwod_w_hu_h ((v8u16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, UV16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_h_bu_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwod_h_bu_b ((v16u8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwev_d_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsubwev_d_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwev_w_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsubwev_w_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwev_h_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsubwev_h_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwod_d_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsubwod_d_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwod_w_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsubwod_w_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwod_h_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsubwod_h_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwev_d_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsubwev_d_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwev_w_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsubwev_w_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwev_h_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsubwev_h_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwod_d_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsubwod_d_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwod_w_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsubwod_w_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwod_h_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsubwod_h_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_q_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwev_q_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_q_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwod_q_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_q_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwev_q_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_q_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwod_q_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwev_q_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsubwev_q_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwod_q_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsubwod_q_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwev_q_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsubwev_q_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsubwod_q_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsubwod_q_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwev_q_du_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwev_q_du_d ((v2u64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vaddwod_q_du_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vaddwod_q_du_d ((v2u64)_1, (v2i64)_2);
+}
+
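+/* Illustrative usage sketch, not part of the upstream intrinsic set;
+   the helper name is an assumption for demonstration.  vaddwev_q_d
+   adds the even-indexed (lane 0) signed 64-bit elements and keeps the
+   full 128-bit sum, while vaddwod_q_d does the same for lane 1.  */
+static __inline __m128i
+example_add_even_lanes_widened (__m128i a, __m128i b)
+{
+ return __lsx_vaddwev_q_d (a, b); /* a[0] + b[0] kept as 128 bits. */
+}
+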
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_d_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwev_d_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_w_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwev_w_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_h_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwev_h_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_d_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwod_d_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_w_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwod_w_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_h_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwod_h_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_d_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwev_d_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_w_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwev_w_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_h_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwev_h_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_d_wu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwod_d_wu ((v4u32)_1, (v4u32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_w_hu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwod_w_hu ((v8u16)_1, (v8u16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_h_bu (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwod_h_bu ((v16u8)_1, (v16u8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_d_wu_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwev_d_wu_w ((v4u32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, UV8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_w_hu_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwev_w_hu_h ((v8u16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, UV16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_h_bu_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwev_h_bu_b ((v16u8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_d_wu_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwod_d_wu_w ((v4u32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, UV8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_w_hu_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwod_w_hu_h ((v8u16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, UV16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_h_bu_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwod_h_bu_b ((v16u8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_q_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwev_q_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_q_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwod_q_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_q_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwev_q_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_q_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwod_q_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwev_q_du_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwev_q_du_d ((v2u64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, UV2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmulwod_q_du_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vmulwod_q_du_d ((v2u64)_1, (v2i64)_2);
+}
+
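+/* Illustrative usage sketch, not part of the upstream intrinsic set;
+   the helper name is an assumption.  vmulwev_d_w and vmulwod_d_w
+   multiply the even- and odd-indexed signed 32-bit lanes and return
+   the full 64-bit products, so the pair covers all four lanes without
+   overflow.  */
+static __inline void
+example_mul_widen_all_lanes (__m128i a, __m128i b,
+                             __m128i *even_products, __m128i *odd_products)
+{
+ *even_products = __lsx_vmulwev_d_w (a, b); /* a[0]*b[0], a[2]*b[2]. */
+ *odd_products = __lsx_vmulwod_d_w (a, b); /* a[1]*b[1], a[3]*b[3]. */
+}
+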
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhaddw_q_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vhaddw_q_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhaddw_qu_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vhaddw_qu_du ((v2u64)_1, (v2u64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhsubw_q_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vhsubw_q_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vhsubw_qu_du (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vhsubw_qu_du ((v2u64)_1, (v2u64)_2);
+}
+
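+/* Illustrative usage sketch, not part of the upstream intrinsic set.
+   vhaddw_q_d adds the odd-indexed 64-bit lane of its first operand to
+   the even-indexed lane of its second and widens the sum to 128 bits,
+   so passing the same vector twice yields a horizontal sum.  */
+static __inline __m128i
+example_horizontal_sum_d (__m128i a)
+{
+ return __lsx_vhaddw_q_d (a, a); /* a[1] + a[0] as a 128-bit value. */
+}
+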
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_d_w (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwev_d_w ((v2i64)_1, (v4i32)_2, (v4i32)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_w_h (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwev_w_h ((v4i32)_1, (v8i16)_2, (v8i16)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_h_b (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwev_h_b ((v8i16)_1, (v16i8)_2, (v16i8)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV2DI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_d_wu (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwev_d_wu ((v2u64)_1, (v4u32)_2, (v4u32)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV4SI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_w_hu (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwev_w_hu ((v4u32)_1, (v8u16)_2, (v8u16)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV8HI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_h_bu (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwev_h_bu ((v8u16)_1, (v16u8)_2, (v16u8)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_d_w (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwod_d_w ((v2i64)_1, (v4i32)_2, (v4i32)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_w_h (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwod_w_h ((v4i32)_1, (v8i16)_2, (v8i16)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_h_b (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwod_h_b ((v8i16)_1, (v16i8)_2, (v16i8)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV2DI, UV4SI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_d_wu (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwod_d_wu ((v2u64)_1, (v4u32)_2, (v4u32)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV4SI, UV4SI, UV8HI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_w_hu (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwod_w_hu ((v4u32)_1, (v8u16)_2, (v8u16)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV8HI, UV8HI, UV16QI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_h_bu (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwod_h_bu ((v8u16)_1, (v16u8)_2, (v16u8)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, UV4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_d_wu_w (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwev_d_wu_w ((v2i64)_1, (v4u32)_2, (v4i32)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, UV8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_w_hu_h (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwev_w_hu_h ((v4i32)_1, (v8u16)_2, (v8i16)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, UV16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_h_bu_b (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwev_h_bu_b ((v8i16)_1, (v16u8)_2, (v16i8)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, UV4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_d_wu_w (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwod_d_wu_w ((v2i64)_1, (v4u32)_2, (v4i32)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, UV8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_w_hu_h (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwod_w_hu_h ((v4i32)_1, (v8u16)_2, (v8i16)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, UV16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_h_bu_b (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwod_h_bu_b ((v8i16)_1, (v16u8)_2, (v16i8)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_q_d (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwev_q_d ((v2i64)_1, (v2i64)_2, (v2i64)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_q_d (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwod_q_d ((v2i64)_1, (v2i64)_2, (v2i64)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_q_du (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwev_q_du ((v2u64)_1, (v2u64)_2, (v2u64)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: UV2DI, UV2DI, UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_q_du (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwod_q_du ((v2u64)_1, (v2u64)_2, (v2u64)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, UV2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwev_q_du_d (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwev_q_du_d ((v2i64)_1, (v2u64)_2, (v2i64)_3);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, UV2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmaddwod_q_du_d (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vmaddwod_q_du_d ((v2i64)_1, (v2u64)_2, (v2i64)_3);
+}
+
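+/* Illustrative usage sketch, not part of the upstream intrinsic set;
+   the helper name is an assumption.  The vmaddwev/vmaddwod pair
+   accumulates the widened products of the even and odd 16-bit lanes
+   into 32-bit sums, the core step of an integer dot product.  */
+static __inline __m128i
+example_dot_step_w_h (__m128i acc, __m128i a, __m128i b)
+{
+ acc = __lsx_vmaddwev_w_h (acc, a, b); /* acc += a[even] * b[even]. */
+ acc = __lsx_vmaddwod_w_h (acc, a, b); /* acc += a[odd] * b[odd]. */
+ return acc;
+}
+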
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vrotr_b (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vrotr_b ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vrotr_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vrotr_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vrotr_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vrotr_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vrotr_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vrotr_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vadd_q (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vadd_q ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vsub_q (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vsub_q ((v2i64)_1, (v2i64)_2);
+}
+
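+/* Illustrative usage sketch, not part of the upstream intrinsic set.
+   vadd_q and vsub_q treat the whole register as a single 128-bit
+   integer, which is convenient for multi-word arithmetic kept in
+   vector registers.  */
+static __inline __m128i
+example_add128 (__m128i a, __m128i b)
+{
+ return __lsx_vadd_q (a, b); /* (a + b) modulo 2^128. */
+}
+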
+/* Assembly instruction format: vd, rj, si12. */
+/* Data types in instruction templates: V16QI, CVPOINTER, SI. */
+#define __lsx_vldrepl_b(/*void **/ _1, /*si12*/ _2) \
+ ((__m128i)__builtin_lsx_vldrepl_b ((void *)(_1), (_2)))
+
+/* Assembly instruction format: vd, rj, si11. */
+/* Data types in instruction templates: V8HI, CVPOINTER, SI. */
+#define __lsx_vldrepl_h(/*void **/ _1, /*si11*/ _2) \
+ ((__m128i)__builtin_lsx_vldrepl_h ((void *)(_1), (_2)))
+
+/* Assembly instruction format: vd, rj, si10. */
+/* Data types in instruction templates: V4SI, CVPOINTER, SI. */
+#define __lsx_vldrepl_w(/*void **/ _1, /*si10*/ _2) \
+ ((__m128i)__builtin_lsx_vldrepl_w ((void *)(_1), (_2)))
+
+/* Assembly instruction format: vd, rj, si9. */
+/* Data types in instruction templates: V2DI, CVPOINTER, SI. */
+#define __lsx_vldrepl_d(/*void **/ _1, /*si9*/ _2) \
+ ((__m128i)__builtin_lsx_vldrepl_d ((void *)(_1), (_2)))
+
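+/* Illustrative usage sketch, not part of the upstream intrinsic set;
+   the pointer name is a placeholder.  vldrepl_w loads one 32-bit
+   element from memory and broadcasts it to every lane; offset 0 is
+   used here for simplicity.  */
+static __inline __m128i
+example_splat_word (const int *p)
+{
+ return __lsx_vldrepl_w (p, 0); /* All four lanes become *p. */
+}
+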
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmskgez_b (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vmskgez_b ((v16i8)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vmsknz_b (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vmsknz_b ((v16i8)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V8HI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vexth_h_b (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vexth_h_b ((v16i8)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V4SI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vexth_w_h (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vexth_w_h ((v8i16)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vexth_d_w (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vexth_d_w ((v4i32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vexth_q_d (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vexth_q_d ((v2i64)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: UV8HI, UV16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vexth_hu_bu (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vexth_hu_bu ((v16u8)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: UV4SI, UV8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vexth_wu_hu (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vexth_wu_hu ((v8u16)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: UV2DI, UV4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vexth_du_wu (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vexth_du_wu ((v4u32)_1);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vexth_qu_du (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vexth_qu_du ((v2u64)_1);
+}
+
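+/* Illustrative usage sketch, not part of the upstream intrinsic set.
+   The vexth_* family widens the high half of the source vector;
+   together with the even/odd widening ops above it covers full-width
+   promotion of a vector.  */
+static __inline __m128i
+example_extend_high_words (__m128i a)
+{
+ return __lsx_vexth_d_w (a); /* a[2], a[3] sign-extended to 64 bits. */
+}
+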
+/* Assembly instruction format: vd, vj, ui3. */
+/* Data types in instruction templates: V16QI, V16QI, UQI. */
+#define __lsx_vrotri_b(/*__m128i*/ _1, /*ui3*/ _2) \
+ ((__m128i)__builtin_lsx_vrotri_b ((v16i8)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: V8HI, V8HI, UQI. */
+#define __lsx_vrotri_h(/*__m128i*/ _1, /*ui4*/ _2) \
+ ((__m128i)__builtin_lsx_vrotri_h ((v8i16)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V4SI, V4SI, UQI. */
+#define __lsx_vrotri_w(/*__m128i*/ _1, /*ui5*/ _2) \
+ ((__m128i)__builtin_lsx_vrotri_w ((v4i32)(_1), (_2)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: V2DI, V2DI, UQI. */
+#define __lsx_vrotri_d(/*__m128i*/ _1, /*ui6*/ _2) \
+ ((__m128i)__builtin_lsx_vrotri_d ((v2i64)(_1), (_2)))
+
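+/* Illustrative usage sketch, not part of the upstream intrinsic set.
+   vrotri_w rotates each 32-bit lane right by an immediate; the value 8
+   is chosen only to demonstrate the immediate form.  */
+static __inline __m128i
+example_rotate_lanes_right_8 (__m128i a)
+{
+ return __lsx_vrotri_w (a, 8); /* Each 32-bit lane rotated right by 8. */
+}
+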
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vextl_q_d (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vextl_q_d ((v2i64)_1);
+}
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI, USI. */
+#define __lsx_vsrlni_b_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+ ((__m128i)__builtin_lsx_vsrlni_b_h ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI, USI. */
+#define __lsx_vsrlni_h_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+ ((__m128i)__builtin_lsx_vsrlni_h_w ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI, USI. */
+#define __lsx_vsrlni_w_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+ ((__m128i)__builtin_lsx_vsrlni_w_d ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui7. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI, USI. */
+#define __lsx_vsrlni_d_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+ ((__m128i)__builtin_lsx_vsrlni_d_q ((v2i64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI, USI. */
+#define __lsx_vsrlrni_b_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+ ((__m128i)__builtin_lsx_vsrlrni_b_h ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI, USI. */
+#define __lsx_vsrlrni_h_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+ ((__m128i)__builtin_lsx_vsrlrni_h_w ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI, USI. */
+#define __lsx_vsrlrni_w_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+ ((__m128i)__builtin_lsx_vsrlrni_w_d ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui7. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI, USI. */
+#define __lsx_vsrlrni_d_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+ ((__m128i)__builtin_lsx_vsrlrni_d_q ((v2i64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI, USI. */
+#define __lsx_vssrlni_b_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+ ((__m128i)__builtin_lsx_vssrlni_b_h ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI, USI. */
+#define __lsx_vssrlni_h_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+ ((__m128i)__builtin_lsx_vssrlni_h_w ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI, USI. */
+#define __lsx_vssrlni_w_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+ ((__m128i)__builtin_lsx_vssrlni_w_d ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui7. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI, USI. */
+#define __lsx_vssrlni_d_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+ ((__m128i)__builtin_lsx_vssrlni_d_q ((v2i64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: UV16QI, UV16QI, V16QI, USI. */
+#define __lsx_vssrlni_bu_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+ ((__m128i)__builtin_lsx_vssrlni_bu_h ((v16u8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: UV8HI, UV8HI, V8HI, USI. */
+#define __lsx_vssrlni_hu_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+ ((__m128i)__builtin_lsx_vssrlni_hu_w ((v8u16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: UV4SI, UV4SI, V4SI, USI. */
+#define __lsx_vssrlni_wu_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+ ((__m128i)__builtin_lsx_vssrlni_wu_d ((v4u32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui7. */
+/* Data types in instruction templates: UV2DI, UV2DI, V2DI, USI. */
+#define __lsx_vssrlni_du_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+ ((__m128i)__builtin_lsx_vssrlni_du_q ((v2u64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI, USI. */
+#define __lsx_vssrlrni_b_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+ ((__m128i)__builtin_lsx_vssrlrni_b_h ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI, USI. */
+#define __lsx_vssrlrni_h_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+ ((__m128i)__builtin_lsx_vssrlrni_h_w ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI, USI. */
+#define __lsx_vssrlrni_w_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+ ((__m128i)__builtin_lsx_vssrlrni_w_d ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui7. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI, USI. */
+#define __lsx_vssrlrni_d_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+ ((__m128i)__builtin_lsx_vssrlrni_d_q ((v2i64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: UV16QI, UV16QI, V16QI, USI. */
+#define __lsx_vssrlrni_bu_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+ ((__m128i)__builtin_lsx_vssrlrni_bu_h ((v16u8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: UV8HI, UV8HI, V8HI, USI. */
+#define __lsx_vssrlrni_hu_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+ ((__m128i)__builtin_lsx_vssrlrni_hu_w ((v8u16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: UV4SI, UV4SI, V4SI, USI. */
+#define __lsx_vssrlrni_wu_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+ ((__m128i)__builtin_lsx_vssrlrni_wu_d ((v4u32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui7. */
+/* Data types in instruction templates: UV2DI, UV2DI, V2DI, USI. */
+#define __lsx_vssrlrni_du_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+ ((__m128i)__builtin_lsx_vssrlrni_du_q ((v2u64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI, USI. */
+#define __lsx_vsrani_b_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+ ((__m128i)__builtin_lsx_vsrani_b_h ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI, USI. */
+#define __lsx_vsrani_h_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+ ((__m128i)__builtin_lsx_vsrani_h_w ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI, USI. */
+#define __lsx_vsrani_w_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+ ((__m128i)__builtin_lsx_vsrani_w_d ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui7. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI, USI. */
+#define __lsx_vsrani_d_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+ ((__m128i)__builtin_lsx_vsrani_d_q ((v2i64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI, USI. */
+#define __lsx_vsrarni_b_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+ ((__m128i)__builtin_lsx_vsrarni_b_h ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI, USI. */
+#define __lsx_vsrarni_h_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+ ((__m128i)__builtin_lsx_vsrarni_h_w ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI, USI. */
+#define __lsx_vsrarni_w_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+ ((__m128i)__builtin_lsx_vsrarni_w_d ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui7. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI, USI. */
+#define __lsx_vsrarni_d_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+ ((__m128i)__builtin_lsx_vsrarni_d_q ((v2i64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI, USI. */
+#define __lsx_vssrani_b_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+ ((__m128i)__builtin_lsx_vssrani_b_h ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI, USI. */
+#define __lsx_vssrani_h_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+ ((__m128i)__builtin_lsx_vssrani_h_w ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI, USI. */
+#define __lsx_vssrani_w_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+ ((__m128i)__builtin_lsx_vssrani_w_d ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui7. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI, USI. */
+#define __lsx_vssrani_d_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+ ((__m128i)__builtin_lsx_vssrani_d_q ((v2i64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: UV16QI, UV16QI, V16QI, USI. */
+#define __lsx_vssrani_bu_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+ ((__m128i)__builtin_lsx_vssrani_bu_h ((v16u8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: UV8HI, UV8HI, V8HI, USI. */
+#define __lsx_vssrani_hu_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+ ((__m128i)__builtin_lsx_vssrani_hu_w ((v8u16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: UV4SI, UV4SI, V4SI, USI. */
+#define __lsx_vssrani_wu_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+ ((__m128i)__builtin_lsx_vssrani_wu_d ((v4u32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui7. */
+/* Data types in instruction templates: UV2DI, UV2DI, V2DI, USI. */
+#define __lsx_vssrani_du_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+ ((__m128i)__builtin_lsx_vssrani_du_q ((v2u64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI, USI. */
+#define __lsx_vssrarni_b_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+ ((__m128i)__builtin_lsx_vssrarni_b_h ((v16i8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: V8HI, V8HI, V8HI, USI. */
+#define __lsx_vssrarni_h_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+ ((__m128i)__builtin_lsx_vssrarni_h_w ((v8i16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI, USI. */
+#define __lsx_vssrarni_w_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+ ((__m128i)__builtin_lsx_vssrarni_w_d ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui7. */
+/* Data types in instruction templates: V2DI, V2DI, V2DI, USI. */
+#define __lsx_vssrarni_d_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+ ((__m128i)__builtin_lsx_vssrarni_d_q ((v2i64)(_1), (v2i64)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui4. */
+/* Data types in instruction templates: UV16QI, UV16QI, V16QI, USI. */
+#define __lsx_vssrarni_bu_h(/*__m128i*/ _1, /*__m128i*/ _2, /*ui4*/ _3) \
+ ((__m128i)__builtin_lsx_vssrarni_bu_h ((v16u8)(_1), (v16i8)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui5. */
+/* Data types in instruction templates: UV8HI, UV8HI, V8HI, USI. */
+#define __lsx_vssrarni_hu_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui5*/ _3) \
+ ((__m128i)__builtin_lsx_vssrarni_hu_w ((v8u16)(_1), (v8i16)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui6. */
+/* Data types in instruction templates: UV4SI, UV4SI, V4SI, USI. */
+#define __lsx_vssrarni_wu_d(/*__m128i*/ _1, /*__m128i*/ _2, /*ui6*/ _3) \
+ ((__m128i)__builtin_lsx_vssrarni_wu_d ((v4u32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format: vd, vj, ui7. */
+/* Data types in instruction templates: UV2DI, UV2DI, V2DI, USI. */
+#define __lsx_vssrarni_du_q(/*__m128i*/ _1, /*__m128i*/ _2, /*ui7*/ _3) \
+ ((__m128i)__builtin_lsx_vssrarni_du_q ((v2u64)(_1), (v2i64)(_2), (_3)))
+
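+/* Illustrative usage sketch, not part of the upstream intrinsic set;
+   which source lands in which half of the packed result follows the
+   ISA definition of vssrlni.bu.h and is not restated here.
+   vssrlni_bu_h shifts every 16-bit lane of both sources right by an
+   immediate, saturates to unsigned 8 bits and packs the results into
+   one vector.  */
+static __inline __m128i
+example_narrow_u8_saturating (__m128i a, __m128i b)
+{
+ return __lsx_vssrlni_bu_h (a, b, 7); /* Shift right by 7, saturate, pack. */
+}
+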
+/* Assembly instruction format: vd, vj, ui8. */
+/* Data types in instruction templates: V4SI, V4SI, V4SI, USI. */
+#define __lsx_vpermi_w(/*__m128i*/ _1, /*__m128i*/ _2, /*ui8*/ _3) \
+ ((__m128i)__builtin_lsx_vpermi_w ((v4i32)(_1), (v4i32)(_2), (_3)))
+
+/* Assembly instruction format: vd, rj, si12. */
+/* Data types in instruction templates: V16QI, CVPOINTER, SI. */
+#define __lsx_vld(/*void **/ _1, /*si12*/ _2) \
+ ((__m128i)__builtin_lsx_vld ((void *)(_1), (_2)))
+
+/* Assembly instruction format: vd, rj, si12. */
+/* Data types in instruction templates: VOID, V16QI, CVPOINTER, SI. */
+#define __lsx_vst(/*__m128i*/ _1, /*void **/ _2, /*si12*/ _3) \
+ ((void)__builtin_lsx_vst ((v16i8)(_1), (void *)(_2), (_3)))
+
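+/* Illustrative usage sketch, not part of the upstream intrinsic set;
+   the pointer names are placeholders.  vld and vst move 16 bytes
+   between memory and a vector register with a small immediate byte
+   offset.  */
+static __inline void
+example_copy_16_bytes (const void *src, void *dst)
+{
+ __m128i tmp = __lsx_vld (src, 0); /* Load 16 bytes from src. */
+ __lsx_vst (tmp, dst, 0); /* Store them to dst. */
+}
+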
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrlrn_b_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssrlrn_b_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrlrn_h_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssrlrn_h_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrlrn_w_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssrlrn_w_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V8HI, V8HI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrln_b_h (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssrln_b_h ((v8i16)_1, (v8i16)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V8HI, V4SI, V4SI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrln_h_w (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssrln_h_w ((v4i32)_1, (v4i32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V2DI, V2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vssrln_w_d (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vssrln_w_d ((v2i64)_1, (v2i64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vorn_v (__m128i _1, __m128i _2)
+{
+ return (__m128i)__builtin_lsx_vorn_v ((v16i8)_1, (v16i8)_2);
+}
+
+/* Assembly instruction format: vd, i13. */
+/* Data types in instruction templates: V2DI, HI. */
+#define __lsx_vldi(/*i13*/ _1) \
+ ((__m128i)__builtin_lsx_vldi ((_1)))
+
+/* Assembly instruction format: vd, vj, vk, va. */
+/* Data types in instruction templates: V16QI, V16QI, V16QI, V16QI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vshuf_b (__m128i _1, __m128i _2, __m128i _3)
+{
+ return (__m128i)__builtin_lsx_vshuf_b ((v16i8)_1, (v16i8)_2, (v16i8)_3);
+}
+
+/* Assembly instruction format: vd, rj, rk. */
+/* Data types in instruction templates: V16QI, CVPOINTER, DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vldx (void * _1, long int _2)
+{
+ return (__m128i)__builtin_lsx_vldx ((void *)_1, (long int)_2);
+}
+
+/* Assembly instruction format: vd, rj, rk. */
+/* Data types in instruction templates: VOID, V16QI, CVPOINTER, DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+void __lsx_vstx (__m128i _1, void * _2, long int _3)
+{
+ return (void)__builtin_lsx_vstx ((v16i8)_1, (void *)_2, (long int)_3);
+}
+
+/* Assembly instruction format: vd, vj. */
+/* Data types in instruction templates: UV2DI, UV2DI. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vextl_qu_du (__m128i _1)
+{
+ return (__m128i)__builtin_lsx_vextl_qu_du ((v2u64)_1);
+}
+
+/* Assembly instruction format: cd, vj. */
+/* Data types in instruction templates: SI, UV16QI. */
+#define __lsx_bnz_b(/*__m128i*/ _1) \
+ ((int)__builtin_lsx_bnz_b ((v16u8)(_1)))
+
+/* Assembly instruction format: cd, vj. */
+/* Data types in instruction templates: SI, UV2DI. */
+#define __lsx_bnz_d(/*__m128i*/ _1) \
+ ((int)__builtin_lsx_bnz_d ((v2u64)(_1)))
+
+/* Assembly instruction format: cd, vj. */
+/* Data types in instruction templates: SI, UV8HI. */
+#define __lsx_bnz_h(/*__m128i*/ _1) \
+ ((int)__builtin_lsx_bnz_h ((v8u16)(_1)))
+
+/* Assembly instruction format: cd, vj. */
+/* Data types in instruction templates: SI, UV16QI. */
+#define __lsx_bnz_v(/*__m128i*/ _1) \
+ ((int)__builtin_lsx_bnz_v ((v16u8)(_1)))
+
+/* Assembly instruction format: cd, vj. */
+/* Data types in instruction templates: SI, UV4SI. */
+#define __lsx_bnz_w(/*__m128i*/ _1) \
+ ((int)__builtin_lsx_bnz_w ((v4u32)(_1)))
+
+/* Assembly instruction format: cd, vj. */
+/* Data types in instruction templates: SI, UV16QI. */
+#define __lsx_bz_b(/*__m128i*/ _1) \
+ ((int)__builtin_lsx_bz_b ((v16u8)(_1)))
+
+/* Assembly instruction format: cd, vj. */
+/* Data types in instruction templates: SI, UV2DI. */
+#define __lsx_bz_d(/*__m128i*/ _1) \
+ ((int)__builtin_lsx_bz_d ((v2u64)(_1)))
+
+/* Assembly instruction format: cd, vj. */
+/* Data types in instruction templates: SI, UV8HI. */
+#define __lsx_bz_h(/*__m128i*/ _1) \
+ ((int)__builtin_lsx_bz_h ((v8u16)(_1)))
+
+/* Assembly instruction format: cd, vj. */
+/* Data types in instruction templates: SI, UV16QI. */
+#define __lsx_bz_v(/*__m128i*/ _1) \
+ ((int)__builtin_lsx_bz_v ((v16u8)(_1)))
+
+/* Assembly instruction format: cd, vj. */
+/* Data types in instruction templates: SI, UV4SI. */
+#define __lsx_bz_w(/*__m128i*/ _1) \
+ ((int)__builtin_lsx_bz_w ((v4u32)(_1)))
+
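+/* Illustrative usage sketch, not part of the upstream intrinsic set;
+   the exact per-element test is as defined by the ISA for the bz/bnz
+   instructions.  __lsx_bz_w is typically used directly in a branch
+   condition, for example to exit a search loop once some 32-bit lane
+   is zero.  */
+static __inline int
+example_any_word_lane_zero (__m128i a)
+{
+ return __lsx_bz_w (a); /* Nonzero when at least one 32-bit lane is zero. */
+}
+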
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_caf_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_caf_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_caf_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_caf_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_ceq_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_ceq_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_ceq_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_ceq_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cle_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_cle_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cle_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_cle_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_clt_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_clt_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_clt_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_clt_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cne_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_cne_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cne_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_cne_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cor_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_cor_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cor_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_cor_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cueq_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_cueq_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cueq_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_cueq_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cule_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_cule_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cule_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_cule_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cult_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_cult_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cult_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_cult_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cun_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_cun_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cune_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_cune_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cune_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_cune_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_cun_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_cun_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_saf_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_saf_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_saf_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_saf_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_seq_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_seq_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_seq_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_seq_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sle_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_sle_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sle_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_sle_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_slt_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_slt_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_slt_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_slt_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sne_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_sne_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sne_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_sne_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sor_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_sor_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sor_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_sor_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sueq_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_sueq_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sueq_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_sueq_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sule_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_sule_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sule_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_sule_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sult_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_sult_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sult_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_sult_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sun_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_sun_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V2DI, V2DF, V2DF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sune_d (__m128d _1, __m128d _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_sune_d ((v2f64)_1, (v2f64)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sune_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_sune_s ((v4f32)_1, (v4f32)_2);
+}
+
+/* Assembly instruction format: vd, vj, vk. */
+/* Data types in instruction templates: V4SI, V4SF, V4SF. */
+extern __inline __attribute__((__gnu_inline__, __always_inline__, __artificial__))
+__m128i __lsx_vfcmp_sun_s (__m128 _1, __m128 _2)
+{
+ return (__m128i)__builtin_lsx_vfcmp_sun_s ((v4f32)_1, (v4f32)_2);
+}
+
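+/* Illustrative usage sketch, not part of the upstream intrinsic set.
+   Each vfcmp intrinsic produces a per-lane all-ones/all-zeros mask;
+   the "c" variants are quiet comparisons while the "s" variants
+   signal on NaN operands.  */
+static __inline __m128i
+example_less_than_mask_s (__m128 a, __m128 b)
+{
+ return __lsx_vfcmp_clt_s (a, b); /* Lane is all ones when a < b, else zero. */
+}
+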
+/* Assembly instruction format: vd, si10. */
+/* Data types in instruction templates: V16QI, HI. */
+#define __lsx_vrepli_b(/*si10*/ _1) \
+ ((__m128i)__builtin_lsx_vrepli_b ((_1)))
+
+/* Assembly instruction format: vd, si10. */
+/* Data types in instruction templates: V2DI, HI. */
+#define __lsx_vrepli_d(/*si10*/ _1) \
+ ((__m128i)__builtin_lsx_vrepli_d ((_1)))
+
+/* Assembly instruction format: vd, si10. */
+/* Data types in instruction templates: V8HI, HI. */
+#define __lsx_vrepli_h(/*si10*/ _1) \
+ ((__m128i)__builtin_lsx_vrepli_h ((_1)))
+
+/* Assembly instruction format: vd, si10. */
+/* Data types in instruction templates: V4SI, HI. */
+#define __lsx_vrepli_w(/*si10*/ _1) \
+ ((__m128i)__builtin_lsx_vrepli_w ((_1)))
+
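+/* Illustrative usage sketch, not part of the upstream intrinsic set.
+   vrepli_w materialises a small signed immediate replicated across all
+   32-bit lanes without touching memory.  */
+static __inline __m128i
+example_splat_minus_one (void)
+{
+ return __lsx_vrepli_w (-1); /* All four lanes set to -1. */
+}
+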
+#endif /* defined(__loongarch_sx) */
+#endif /* _GCC_LOONGSON_SXINTRIN_H */
--
2.33.0