# ladderstep.s

# qhasm: int64 workp
# qhasm: input workp
# qhasm: int64 caller1
# qhasm: int64 caller2
# qhasm: int64 caller3
# qhasm: int64 caller4
# qhasm: int64 caller5
# qhasm: int64 caller6
# qhasm: int64 caller7
# qhasm: caller caller1
# qhasm: caller caller2
# qhasm: caller caller3
# qhasm: caller caller4
# qhasm: caller caller5
# qhasm: caller caller6
# qhasm: caller caller7
# qhasm: stack64 caller1_stack
# qhasm: stack64 caller2_stack
# qhasm: stack64 caller3_stack
# qhasm: stack64 caller4_stack
# qhasm: stack64 caller5_stack
# qhasm: stack64 caller6_stack
# qhasm: stack64 caller7_stack
# qhasm: int64 t10
# qhasm: int64 t11
# qhasm: int64 t12
# qhasm: int64 t13
# qhasm: stack64 t10_stack
# qhasm: stack64 t11_stack
# qhasm: stack64 t12_stack
# qhasm: stack64 t13_stack
# qhasm: int64 t20
# qhasm: int64 t21
# qhasm: int64 t22
# qhasm: int64 t23
# qhasm: stack64 t20_stack
# qhasm: stack64 t21_stack
# qhasm: stack64 t22_stack
# qhasm: stack64 t23_stack
# qhasm: int64 t30
# qhasm: int64 t31
# qhasm: int64 t32
# qhasm: int64 t33
# qhasm: stack64 t30_stack
# qhasm: stack64 t31_stack
# qhasm: stack64 t32_stack
# qhasm: stack64 t33_stack
# qhasm: int64 t40
# qhasm: int64 t41
# qhasm: int64 t42
# qhasm: int64 t43
# qhasm: stack64 t40_stack
# qhasm: stack64 t41_stack
# qhasm: stack64 t42_stack
# qhasm: stack64 t43_stack
# qhasm: int64 t50
# qhasm: int64 t51
# qhasm: int64 t52
# qhasm: int64 t53
# qhasm: stack64 t50_stack
# qhasm: stack64 t51_stack
# qhasm: stack64 t52_stack
# qhasm: stack64 t53_stack
# qhasm: int64 t60
# qhasm: int64 t61
# qhasm: int64 t62
# qhasm: int64 t63
# qhasm: stack64 t60_stack
# qhasm: stack64 t61_stack
# qhasm: stack64 t62_stack
# qhasm: stack64 t63_stack
# qhasm: int64 t70
# qhasm: int64 t71
# qhasm: int64 t72
# qhasm: int64 t73
# qhasm: stack64 t70_stack
# qhasm: stack64 t71_stack
# qhasm: stack64 t72_stack
# qhasm: stack64 t73_stack
# qhasm: int64 t80
# qhasm: int64 t81
# qhasm: int64 t82
# qhasm: int64 t83
# qhasm: stack64 t80_stack
# qhasm: stack64 t81_stack
# qhasm: stack64 t82_stack
# qhasm: stack64 t83_stack
# qhasm: int64 t90
# qhasm: int64 t91
# qhasm: int64 t92
# qhasm: int64 t93
# qhasm: stack64 t90_stack
# qhasm: stack64 t91_stack
# qhasm: stack64 t92_stack
# qhasm: stack64 t93_stack
# qhasm: int64 xp0
# qhasm: int64 xp1
# qhasm: int64 xp2
# qhasm: int64 xp3
# qhasm: int64 zp0
# qhasm: int64 zp1
# qhasm: int64 zp2
# qhasm: int64 zp3
# qhasm: int64 xq0
# qhasm: int64 xq1
# qhasm: int64 xq2
# qhasm: int64 xq3
# qhasm: int64 zq0
# qhasm: int64 zq1
# qhasm: int64 zq2
# qhasm: int64 zq3
# qhasm: int64 mulr4
# qhasm: int64 mulr5
# qhasm: int64 mulr6
# qhasm: int64 mulr7
# qhasm: int64 mulr8
# qhasm: int64 mulrax
# qhasm: int64 mulrdx
# qhasm: int64 mulx0
# qhasm: int64 mulx1
# qhasm: int64 mulx2
# qhasm: int64 mulx3
# qhasm: int64 mulc
# qhasm: int64 mulzero
# qhasm: int64 muli38
# qhasm: int64 squarer4
# qhasm: int64 squarer5
# qhasm: int64 squarer6
# qhasm: int64 squarer7
# qhasm: int64 squarer8
# qhasm: int64 squarerax
# qhasm: int64 squarerdx
# qhasm: int64 squaret1
# qhasm: int64 squaret2
# qhasm: int64 squaret3
# qhasm: int64 squarec
# qhasm: int64 squarezero
# qhasm: int64 squarei38
# qhasm: int64 mul121666rax
# qhasm: int64 mul121666rdx
# qhasm: int64 mul121666r4
# qhasm: int64 mul121666t1
# qhasm: int64 mul121666t2
# qhasm: int64 mul121666t3
# qhasm: int64 addt0
# qhasm: int64 addt1
# qhasm: int64 subt0
# qhasm: int64 subt1
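#
# note: the code below is one step of the Montgomery ladder for Curve25519,
# operating on 4x64-bit field elements mod 2^255-19 stored at workp.  Reading
# workp+32..56 and workp+64..88 as the X and Z limbs of the first ladder point
# is an assumption based on the xp*/zp*/xq*/zq* names declared above; the
# work-array layout is not spelled out in this file itself.
#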
# qhasm: enter crypto_scalarmult_curve25519_amd64_64_ladderstep
.text
.p2align 5
.globl _crypto_scalarmult_curve25519_amd64_64_ladderstep
.globl crypto_scalarmult_curve25519_amd64_64_ladderstep
_crypto_scalarmult_curve25519_amd64_64_ladderstep:
crypto_scalarmult_curve25519_amd64_64_ladderstep:
mov %rsp,%r11
and $31,%r11
add $288,%r11
sub %r11,%rsp
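# note: the four instructions above align %rsp down to a 32-byte boundary and
# reserve at least 288 bytes of scratch space; the movq stores that follow
# spill %r11 (the frame adjustment) and the callee-saved registers
# %r12-%r15, %rbx and %rbp into it, presumably so the epilogue can restore
# them and undo the stack adjustment.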
# qhasm: caller1_stack = caller1
# asm 1: movq <caller1=int64#9,>caller1_stack=stack64#1
# asm 2: movq <caller1=%r11,>caller1_stack=0(%rsp)
movq %r11,0(%rsp)
# qhasm: caller2_stack = caller2
# asm 1: movq <caller2=int64#10,>caller2_stack=stack64#2
# asm 2: movq <caller2=%r12,>caller2_stack=8(%rsp)
movq %r12,8(%rsp)
# qhasm: caller3_stack = caller3
# asm 1: movq <caller3=int64#11,>caller3_stack=stack64#3
# asm 2: movq <caller3=%r13,>caller3_stack=16(%rsp)
movq %r13,16(%rsp)
# qhasm: caller4_stack = caller4
# asm 1: movq <caller4=int64#12,>caller4_stack=stack64#4
# asm 2: movq <caller4=%r14,>caller4_stack=24(%rsp)
movq %r14,24(%rsp)
# qhasm: caller5_stack = caller5
# asm 1: movq <caller5=int64#13,>caller5_stack=stack64#5
# asm 2: movq <caller5=%r15,>caller5_stack=32(%rsp)
movq %r15,32(%rsp)
# qhasm: caller6_stack = caller6
# asm 1: movq <caller6=int64#14,>caller6_stack=stack64#6
# asm 2: movq <caller6=%rbx,>caller6_stack=40(%rsp)
movq %rbx,40(%rsp)
# qhasm: caller7_stack = caller7
# asm 1: movq <caller7=int64#15,>caller7_stack=stack64#7
# asm 2: movq <caller7=%rbp,>caller7_stack=48(%rsp)
movq %rbp,48(%rsp)
# qhasm: t10 = *(uint64 *)(workp + 32)
# asm 1: movq 32(<workp=int64#1),>t10=int64#2
# asm 2: movq 32(<workp=%rdi),>t10=%rsi
movq 32(%rdi),%rsi
# qhasm: t11 = *(uint64 *)(workp + 40)
# asm 1: movq 40(<workp=int64#1),>t11=int64#3
# asm 2: movq 40(<workp=%rdi),>t11=%rdx
movq 40(%rdi),%rdx
# qhasm: t12 = *(uint64 *)(workp + 48)
# asm 1: movq 48(<workp=int64#1),>t12=int64#4
# asm 2: movq 48(<workp=%rdi),>t12=%rcx
movq 48(%rdi),%rcx
# qhasm: t13 = *(uint64 *)(workp + 56)
# asm 1: movq 56(<workp=int64#1),>t13=int64#5
# asm 2: movq 56(<workp=%rdi),>t13=%r8
movq 56(%rdi),%r8
# qhasm: t20 = t10
# asm 1: mov <t10=int64#2,>t20=int64#6
# asm 2: mov <t10=%rsi,>t20=%r9
mov %rsi,%r9
# qhasm: t21 = t11
# asm 1: mov <t11=int64#3,>t21=int64#7
# asm 2: mov <t11=%rdx,>t21=%rax
mov %rdx,%rax
# qhasm: t22 = t12
# asm 1: mov <t12=int64#4,>t22=int64#8
# asm 2: mov <t12=%rcx,>t22=%r10
mov %rcx,%r10
# qhasm: t23 = t13
# asm 1: mov <t13=int64#5,>t23=int64#9
# asm 2: mov <t13=%r8,>t23=%r11
mov %r8,%r11
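# note: at this point t10..t13 and t20..t23 both hold the element loaded from
# workp+32..56; the next two blocks compute its sum with and difference from
# the element at workp+64..88, reduced mod 2^255-19.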
# qhasm: carry? t10 += *(uint64 *)(workp + 64)
# asm 1: addq 64(<workp=int64#1),<t10=int64#2
# asm 2: addq 64(<workp=%rdi),<t10=%rsi
addq 64(%rdi),%rsi
# qhasm: carry? t11 += *(uint64 *)(workp + 72) + carry
# asm 1: adcq 72(<workp=int64#1),<t11=int64#3
# asm 2: adcq 72(<workp=%rdi),<t11=%rdx
adcq 72(%rdi),%rdx
# qhasm: carry? t12 += *(uint64 *)(workp + 80) + carry
# asm 1: adcq 80(<workp=int64#1),<t12=int64#4
# asm 2: adcq 80(<workp=%rdi),<t12=%rcx
adcq 80(%rdi),%rcx
# qhasm: carry? t13 += *(uint64 *)(workp + 88) + carry
# asm 1: adcq 88(<workp=int64#1),<t13=int64#5
# asm 2: adcq 88(<workp=%rdi),<t13=%r8
adcq 88(%rdi),%r8
# qhasm: addt0 = 0
# asm 1: mov $0,>addt0=int64#10
# asm 2: mov $0,>addt0=%r12
mov $0,%r12
# qhasm: addt1 = 38
# asm 1: mov $38,>addt1=int64#11
# asm 2: mov $38,>addt1=%r13
mov $38,%r13
# qhasm: addt1 = addt0 if !carry
# asm 1: cmovae <addt0=int64#10,<addt1=int64#11
# asm 2: cmovae <addt0=%r12,<addt1=%r13
cmovae %r12,%r13
# qhasm: carry? t10 += addt1
# asm 1: add <addt1=int64#11,<t10=int64#2
# asm 2: add <addt1=%r13,<t10=%rsi
add %r13,%rsi
# qhasm: carry? t11 += addt0 + carry
# asm 1: adc <addt0=int64#10,<t11=int64#3
# asm 2: adc <addt0=%r12,<t11=%rdx
adc %r12,%rdx
# qhasm: carry? t12 += addt0 + carry
# asm 1: adc <addt0=int64#10,<t12=int64#4
# asm 2: adc <addt0=%r12,<t12=%rcx
adc %r12,%rcx
# qhasm: carry? t13 += addt0 + carry
# asm 1: adc <addt0=int64#10,<t13=int64#5
# asm 2: adc <addt0=%r12,<t13=%r8
adc %r12,%r8
# qhasm: addt0 = addt1 if carry
# asm 1: cmovc <addt1=int64#11,<addt0=int64#10
# asm 2: cmovc <addt1=%r13,<addt0=%r12
cmovc %r13,%r12
# qhasm: t10 += addt0
# asm 1: add <addt0=int64#10,<t10=int64#2
# asm 2: add <addt0=%r12,<t10=%rsi
add %r12,%rsi
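# note: the conditional 38s above implement the reduction: a carry out of
# bit 256 is folded back in via 2^256 = 38 (mod 2^255-19), and the final
# "add %r12,%rsi" absorbs the (rare) carry produced by that correction itself.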
# qhasm: carry? t20 -= *(uint64 *)(workp + 64)
# asm 1: subq 64(<workp=int64#1),<t20=int64#6
# asm 2: subq 64(<workp=%rdi),<t20=%r9
subq 64(%rdi),%r9
# qhasm: carry? t21 -= *(uint64 *)(workp + 72) - carry
# asm 1: sbbq 72(<workp=int64#1),<t21=int64#7
# asm 2: sbbq 72(<workp=%rdi),<t21=%rax
sbbq 72(%rdi),%rax
# qhasm: carry? t22 -= *(uint64 *)(workp + 80) - carry
# asm 1: sbbq 80(<workp=int64#1),<t22=int64#8
# asm 2: sbbq 80(<workp=%rdi),<t22=%r10
sbbq 80(%rdi),%r10
# qhasm: carry? t23 -= *(uint64 *)(workp + 88) - carry
# asm 1: sbbq 88(<workp=int64#1),<t23=int64#9
# asm 2: sbbq 88(<workp=%rdi),<t23=%r11
sbbq 88(%rdi),%r11
# qhasm: subt0 = 0
# asm 1: mov $0,>subt0=int64#10
# asm 2: mov $0,>subt0=%r12
mov $0,%r12
# qhasm: subt1 = 38
# asm 1: mov $38,>subt1=int64#11
# asm 2: mov $38,>subt1=%r13
mov $38,%r13
# qhasm: subt1 = subt0 if !carry
# asm 1: cmovae <subt0=int64#10,<subt1=int64#11
# asm 2: cmovae <subt0=%r12,<subt1=%r13
cmovae %r12,%r13
# qhasm: carry? t20 -= subt1
# asm 1: sub <subt1=int64#11,<t20=int64#6
# asm 2: sub <subt1=%r13,<t20=%r9
sub %r13,%r9
# qhasm: carry? t21 -= subt0 - carry
# asm 1: sbb <subt0=int64#10,<t21=int64#7
# asm 2: sbb <subt0=%r12,<t21=%rax
sbb %r12,%rax
# qhasm: carry? t22 -= subt0 - carry
# asm 1: sbb <subt0=int64#10,<t22=int64#8
# asm 2: sbb <subt0=%r12,<t22=%r10
sbb %r12,%r10
# qhasm: carry? t23 -= subt0 - carry
# asm 1: sbb <subt0=int64#10,<t23=int64#9
# asm 2: sbb <subt0=%r12,<t23=%r11
sbb %r12,%r11
# qhasm: subt0 = subt1 if carry
# asm 1: cmovc <subt1=int64#11,<subt0=int64#10
# asm 2: cmovc <subt1=%r13,<subt0=%r12
cmovc %r13,%r12
# qhasm: t20 -= subt0
# asm 1: sub <subt0=int64#10,<t20=int64#6
# asm 2: sub <subt0=%r12,<t20=%r9
sub %r12,%r9
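# note: the subtraction is reduced symmetrically: a borrow out of the top limb
# is compensated by conditionally subtracting 38, with a final correction for
# a borrow caused by the correction itself.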
# qhasm: t10_stack = t10
# asm 1: movq <t10=int64#2,>t10_stack=stack64#8
# asm 2: movq <t10=%rsi,>t10_stack=56(%rsp)
movq %rsi,56(%rsp)
# qhasm: t11_stack = t11
# asm 1: movq <t11=int64#3,>t11_stack=stack64#9
# asm 2: movq <t11=%rdx,>t11_stack=64(%rsp)
movq %rdx,64(%rsp)
# qhasm: t12_stack = t12
# asm 1: movq <t12=int64#4,>t12_stack=stack64#10
# asm 2: movq <t12=%rcx,>t12_stack=72(%rsp)
movq %rcx,72(%rsp)
# qhasm: t13_stack = t13
# asm 1: movq <t13=int64#5,>t13_stack=stack64#11
# asm 2: movq <t13=%r8,>t13_stack=80(%rsp)
movq %r8,80(%rsp)
# qhasm: t20_stack = t20
# asm 1: movq <t20=int64#6,>t20_stack=stack64#12
# asm 2: movq <t20=%r9,>t20_stack=88(%rsp)
movq %r9,88(%rsp)
# qhasm: t21_stack = t21
# asm 1: movq <t21=int64#7,>t21_stack=stack64#13
# asm 2: movq <t21=%rax,>t21_stack=96(%rsp)
movq %rax,96(%rsp)
# qhasm: t22_stack = t22
# asm 1: movq <t22=int64#8,>t22_stack=stack64#14
# asm 2: movq <t22=%r10,>t22_stack=104(%rsp)
movq %r10,104(%rsp)
# qhasm: t23_stack = t23
# asm 1: movq <t23=int64#9,>t23_stack=stack64#15
# asm 2: movq <t23=%r11,>t23_stack=112(%rsp)
movq %r11,112(%rsp)
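# note: the sum (t10..t13) and difference (t20..t23) are now spilled at
# 56(%rsp)..80(%rsp) and 88(%rsp)..112(%rsp).  The block below squares the
# difference into t70..t73: it forms the cross products, doubles them, adds
# the squared limbs on the diagonal, and finally folds the four high limbs
# back in by multiplying them by 38 (again using 2^256 = 38 mod 2^255-19).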
# qhasm: squarer7 = 0
# asm 1: mov $0,>squarer7=int64#2
# asm 2: mov $0,>squarer7=%rsi
mov $0,%rsi
# qhasm: squarerax = t21_stack
# asm 1: movq <t21_stack=stack64#13,>squarerax=int64#7
# asm 2: movq <t21_stack=96(%rsp),>squarerax=%rax
movq 96(%rsp),%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t20_stack
# asm 1: mulq <t20_stack=stack64#12
# asm 2: mulq <t20_stack=88(%rsp)
mulq 88(%rsp)
# qhasm: t71 = squarerax
# asm 1: mov <squarerax=int64#7,>t71=int64#4
# asm 2: mov <squarerax=%rax,>t71=%rcx
mov %rax,%rcx
# qhasm: t72 = squarerdx
# asm 1: mov <squarerdx=int64#3,>t72=int64#5
# asm 2: mov <squarerdx=%rdx,>t72=%r8
mov %rdx,%r8
# qhasm: squarerax = t22_stack
# asm 1: movq <t22_stack=stack64#14,>squarerax=int64#7
# asm 2: movq <t22_stack=104(%rsp),>squarerax=%rax
movq 104(%rsp),%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t21_stack
# asm 1: mulq <t21_stack=stack64#13
# asm 2: mulq <t21_stack=96(%rsp)
mulq 96(%rsp)
# qhasm: t73 = squarerax
# asm 1: mov <squarerax=int64#7,>t73=int64#6
# asm 2: mov <squarerax=%rax,>t73=%r9
mov %rax,%r9
# qhasm: squarer4 = squarerdx
# asm 1: mov <squarerdx=int64#3,>squarer4=int64#8
# asm 2: mov <squarerdx=%rdx,>squarer4=%r10
mov %rdx,%r10
# qhasm: squarerax = t23_stack
# asm 1: movq <t23_stack=stack64#15,>squarerax=int64#7
# asm 2: movq <t23_stack=112(%rsp),>squarerax=%rax
movq 112(%rsp),%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t22_stack
# asm 1: mulq <t22_stack=stack64#14
# asm 2: mulq <t22_stack=104(%rsp)
mulq 104(%rsp)
# qhasm: squarer5 = squarerax
# asm 1: mov <squarerax=int64#7,>squarer5=int64#9
# asm 2: mov <squarerax=%rax,>squarer5=%r11
mov %rax,%r11
# qhasm: squarer6 = squarerdx
# asm 1: mov <squarerdx=int64#3,>squarer6=int64#10
# asm 2: mov <squarerdx=%rdx,>squarer6=%r12
mov %rdx,%r12
# qhasm: squarerax = t22_stack
# asm 1: movq <t22_stack=stack64#14,>squarerax=int64#7
# asm 2: movq <t22_stack=104(%rsp),>squarerax=%rax
movq 104(%rsp),%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t20_stack
# asm 1: mulq <t20_stack=stack64#12
# asm 2: mulq <t20_stack=88(%rsp)
mulq 88(%rsp)
# qhasm: carry? t72 += squarerax
# asm 1: add <squarerax=int64#7,<t72=int64#5
# asm 2: add <squarerax=%rax,<t72=%r8
add %rax,%r8
# qhasm: carry? t73 += squarerdx + carry
# asm 1: adc <squarerdx=int64#3,<t73=int64#6
# asm 2: adc <squarerdx=%rdx,<t73=%r9
adc %rdx,%r9
# qhasm: squarer4 += 0 + carry
# asm 1: adc $0,<squarer4=int64#8
# asm 2: adc $0,<squarer4=%r10
adc $0,%r10
# qhasm: squarerax = t23_stack
# asm 1: movq <t23_stack=stack64#15,>squarerax=int64#7
# asm 2: movq <t23_stack=112(%rsp),>squarerax=%rax
movq 112(%rsp),%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t21_stack
# asm 1: mulq <t21_stack=stack64#13
# asm 2: mulq <t21_stack=96(%rsp)
mulq 96(%rsp)
# qhasm: carry? squarer4 += squarerax
# asm 1: add <squarerax=int64#7,<squarer4=int64#8
# asm 2: add <squarerax=%rax,<squarer4=%r10
add %rax,%r10
# qhasm: carry? squarer5 += squarerdx + carry
# asm 1: adc <squarerdx=int64#3,<squarer5=int64#9
# asm 2: adc <squarerdx=%rdx,<squarer5=%r11
adc %rdx,%r11
# qhasm: squarer6 += 0 + carry
# asm 1: adc $0,<squarer6=int64#10
# asm 2: adc $0,<squarer6=%r12
adc $0,%r12
# qhasm: squarerax = t23_stack
# asm 1: movq <t23_stack=stack64#15,>squarerax=int64#7
# asm 2: movq <t23_stack=112(%rsp),>squarerax=%rax
movq 112(%rsp),%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t20_stack
# asm 1: mulq <t20_stack=stack64#12
# asm 2: mulq <t20_stack=88(%rsp)
mulq 88(%rsp)
# qhasm: carry? t73 += squarerax
# asm 1: add <squarerax=int64#7,<t73=int64#6
# asm 2: add <squarerax=%rax,<t73=%r9
add %rax,%r9
# qhasm: carry? squarer4 += squarerdx + carry
# asm 1: adc <squarerdx=int64#3,<squarer4=int64#8
# asm 2: adc <squarerdx=%rdx,<squarer4=%r10
adc %rdx,%r10
# qhasm: carry? squarer5 += 0 + carry
# asm 1: adc $0,<squarer5=int64#9
# asm 2: adc $0,<squarer5=%r11
adc $0,%r11
# qhasm: carry? squarer6 += 0 + carry
# asm 1: adc $0,<squarer6=int64#10
# asm 2: adc $0,<squarer6=%r12
adc $0,%r12
# qhasm: squarer7 += 0 + carry
# asm 1: adc $0,<squarer7=int64#2
# asm 2: adc $0,<squarer7=%rsi
adc $0,%rsi
# qhasm: carry? t71 += t71
# asm 1: add <t71=int64#4,<t71=int64#4
# asm 2: add <t71=%rcx,<t71=%rcx
add %rcx,%rcx
# qhasm: carry? t72 += t72 + carry
# asm 1: adc <t72=int64#5,<t72=int64#5
# asm 2: adc <t72=%r8,<t72=%r8
adc %r8,%r8
# qhasm: carry? t73 += t73 + carry
# asm 1: adc <t73=int64#6,<t73=int64#6
# asm 2: adc <t73=%r9,<t73=%r9
adc %r9,%r9
# qhasm: carry? squarer4 += squarer4 + carry
# asm 1: adc <squarer4=int64#8,<squarer4=int64#8
# asm 2: adc <squarer4=%r10,<squarer4=%r10
adc %r10,%r10
# qhasm: carry? squarer5 += squarer5 + carry
# asm 1: adc <squarer5=int64#9,<squarer5=int64#9
# asm 2: adc <squarer5=%r11,<squarer5=%r11
adc %r11,%r11
# qhasm: carry? squarer6 += squarer6 + carry
# asm 1: adc <squarer6=int64#10,<squarer6=int64#10
# asm 2: adc <squarer6=%r12,<squarer6=%r12
adc %r12,%r12
# qhasm: squarer7 += squarer7 + carry
# asm 1: adc <squarer7=int64#2,<squarer7=int64#2
# asm 2: adc <squarer7=%rsi,<squarer7=%rsi
adc %rsi,%rsi
# qhasm: squarerax = t20_stack
# asm 1: movq <t20_stack=stack64#12,>squarerax=int64#7
# asm 2: movq <t20_stack=88(%rsp),>squarerax=%rax
movq 88(%rsp),%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t20_stack
# asm 1: mulq <t20_stack=stack64#12
# asm 2: mulq <t20_stack=88(%rsp)
mulq 88(%rsp)
# qhasm: t70 = squarerax
# asm 1: mov <squarerax=int64#7,>t70=int64#11
# asm 2: mov <squarerax=%rax,>t70=%r13
mov %rax,%r13
# qhasm: squaret1 = squarerdx
# asm 1: mov <squarerdx=int64#3,>squaret1=int64#12
# asm 2: mov <squarerdx=%rdx,>squaret1=%r14
mov %rdx,%r14
# qhasm: squarerax = t21_stack
# asm 1: movq <t21_stack=stack64#13,>squarerax=int64#7
# asm 2: movq <t21_stack=96(%rsp),>squarerax=%rax
movq 96(%rsp),%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t21_stack
# asm 1: mulq <t21_stack=stack64#13
# asm 2: mulq <t21_stack=96(%rsp)
mulq 96(%rsp)
# qhasm: squaret2 = squarerax
# asm 1: mov <squarerax=int64#7,>squaret2=int64#13
# asm 2: mov <squarerax=%rax,>squaret2=%r15
mov %rax,%r15
# qhasm: squaret3 = squarerdx
# asm 1: mov <squarerdx=int64#3,>squaret3=int64#14
# asm 2: mov <squarerdx=%rdx,>squaret3=%rbx
mov %rdx,%rbx
# qhasm: squarerax = t22_stack
# asm 1: movq <t22_stack=stack64#14,>squarerax=int64#7
# asm 2: movq <t22_stack=104(%rsp),>squarerax=%rax
movq 104(%rsp),%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t22_stack
# asm 1: mulq <t22_stack=stack64#14
# asm 2: mulq <t22_stack=104(%rsp)
mulq 104(%rsp)
# qhasm: carry? t71 += squaret1
# asm 1: add <squaret1=int64#12,<t71=int64#4
# asm 2: add <squaret1=%r14,<t71=%rcx
add %r14,%rcx
# qhasm: carry? t72 += squaret2 + carry
# asm 1: adc <squaret2=int64#13,<t72=int64#5
# asm 2: adc <squaret2=%r15,<t72=%r8
adc %r15,%r8
# qhasm: carry? t73 += squaret3 + carry
# asm 1: adc <squaret3=int64#14,<t73=int64#6
# asm 2: adc <squaret3=%rbx,<t73=%r9
adc %rbx,%r9
# qhasm: carry? squarer4 += squarerax + carry
# asm 1: adc <squarerax=int64#7,<squarer4=int64#8
# asm 2: adc <squarerax=%rax,<squarer4=%r10
adc %rax,%r10
# qhasm: carry? squarer5 += squarerdx + carry
# asm 1: adc <squarerdx=int64#3,<squarer5=int64#9
# asm 2: adc <squarerdx=%rdx,<squarer5=%r11
adc %rdx,%r11
# qhasm: carry? squarer6 += 0 + carry
# asm 1: adc $0,<squarer6=int64#10
# asm 2: adc $0,<squarer6=%r12
adc $0,%r12
# qhasm: squarer7 += 0 + carry
# asm 1: adc $0,<squarer7=int64#2
# asm 2: adc $0,<squarer7=%rsi
adc $0,%rsi
# qhasm: squarerax = t23_stack
# asm 1: movq <t23_stack=stack64#15,>squarerax=int64#7
# asm 2: movq <t23_stack=112(%rsp),>squarerax=%rax
movq 112(%rsp),%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t23_stack
# asm 1: mulq <t23_stack=stack64#15
# asm 2: mulq <t23_stack=112(%rsp)
mulq 112(%rsp)
# qhasm: carry? squarer6 += squarerax
# asm 1: add <squarerax=int64#7,<squarer6=int64#10
# asm 2: add <squarerax=%rax,<squarer6=%r12
add %rax,%r12
# qhasm: squarer7 += squarerdx + carry
# asm 1: adc <squarerdx=int64#3,<squarer7=int64#2
# asm 2: adc <squarerdx=%rdx,<squarer7=%rsi
adc %rdx,%rsi
# qhasm: squarerax = squarer4
# asm 1: mov <squarer4=int64#8,>squarerax=int64#7
# asm 2: mov <squarer4=%r10,>squarerax=%rax
mov %r10,%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
mulq crypto_scalarmult_curve25519_amd64_64_38
# qhasm: squarer4 = squarerax
# asm 1: mov <squarerax=int64#7,>squarer4=int64#8
# asm 2: mov <squarerax=%rax,>squarer4=%r10
mov %rax,%r10
# qhasm: squarerax = squarer5
# asm 1: mov <squarer5=int64#9,>squarerax=int64#7
# asm 2: mov <squarer5=%r11,>squarerax=%rax
mov %r11,%rax
# qhasm: squarer5 = squarerdx
# asm 1: mov <squarerdx=int64#3,>squarer5=int64#9
# asm 2: mov <squarerdx=%rdx,>squarer5=%r11
mov %rdx,%r11
# qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
mulq crypto_scalarmult_curve25519_amd64_64_38
# qhasm: carry? squarer5 += squarerax
# asm 1: add <squarerax=int64#7,<squarer5=int64#9
# asm 2: add <squarerax=%rax,<squarer5=%r11
add %rax,%r11
# qhasm: squarerax = squarer6
# asm 1: mov <squarer6=int64#10,>squarerax=int64#7
# asm 2: mov <squarer6=%r12,>squarerax=%rax
mov %r12,%rax
# qhasm: squarer6 = 0
# asm 1: mov $0,>squarer6=int64#10
# asm 2: mov $0,>squarer6=%r12
mov $0,%r12
# qhasm: squarer6 += squarerdx + carry
# asm 1: adc <squarerdx=int64#3,<squarer6=int64#10
# asm 2: adc <squarerdx=%rdx,<squarer6=%r12
adc %rdx,%r12
# qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
mulq crypto_scalarmult_curve25519_amd64_64_38
# qhasm: carry? squarer6 += squarerax
# asm 1: add <squarerax=int64#7,<squarer6=int64#10
# asm 2: add <squarerax=%rax,<squarer6=%r12
add %rax,%r12
# qhasm: squarerax = squarer7
# asm 1: mov <squarer7=int64#2,>squarerax=int64#7
# asm 2: mov <squarer7=%rsi,>squarerax=%rax
mov %rsi,%rax
# qhasm: squarer7 = 0
# asm 1: mov $0,>squarer7=int64#2
# asm 2: mov $0,>squarer7=%rsi
mov $0,%rsi
# qhasm: squarer7 += squarerdx + carry
# asm 1: adc <squarerdx=int64#3,<squarer7=int64#2
# asm 2: adc <squarerdx=%rdx,<squarer7=%rsi
adc %rdx,%rsi
# qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
mulq crypto_scalarmult_curve25519_amd64_64_38
# qhasm: carry? squarer7 += squarerax
# asm 1: add <squarerax=int64#7,<squarer7=int64#2
# asm 2: add <squarerax=%rax,<squarer7=%rsi
add %rax,%rsi
# qhasm: squarer8 = 0
# asm 1: mov $0,>squarer8=int64#7
# asm 2: mov $0,>squarer8=%rax
mov $0,%rax
# qhasm: squarer8 += squarerdx + carry
# asm 1: adc <squarerdx=int64#3,<squarer8=int64#7
# asm 2: adc <squarerdx=%rdx,<squarer8=%rax
adc %rdx,%rax
# qhasm: carry? t70 += squarer4
# asm 1: add <squarer4=int64#8,<t70=int64#11
# asm 2: add <squarer4=%r10,<t70=%r13
add %r10,%r13
# qhasm: carry? t71 += squarer5 + carry
# asm 1: adc <squarer5=int64#9,<t71=int64#4
# asm 2: adc <squarer5=%r11,<t71=%rcx
adc %r11,%rcx
# qhasm: carry? t72 += squarer6 + carry
# asm 1: adc <squarer6=int64#10,<t72=int64#5
# asm 2: adc <squarer6=%r12,<t72=%r8
adc %r12,%r8
# qhasm: carry? t73 += squarer7 + carry
# asm 1: adc <squarer7=int64#2,<t73=int64#6
# asm 2: adc <squarer7=%rsi,<t73=%r9
adc %rsi,%r9
# qhasm: squarezero = 0
# asm 1: mov $0,>squarezero=int64#2
# asm 2: mov $0,>squarezero=%rsi
mov $0,%rsi
# qhasm: squarer8 += squarezero + carry
# asm 1: adc <squarezero=int64#2,<squarer8=int64#7
# asm 2: adc <squarezero=%rsi,<squarer8=%rax
adc %rsi,%rax
# qhasm: squarer8 *= 38
# asm 1: imulq $38,<squarer8=int64#7,>squarer8=int64#3
# asm 2: imulq $38,<squarer8=%rax,>squarer8=%rdx
imulq $38,%rax,%rdx
# qhasm: carry? t70 += squarer8
# asm 1: add <squarer8=int64#3,<t70=int64#11
# asm 2: add <squarer8=%rdx,<t70=%r13
add %rdx,%r13
# qhasm: carry? t71 += squarezero + carry
# asm 1: adc <squarezero=int64#2,<t71=int64#4
# asm 2: adc <squarezero=%rsi,<t71=%rcx
adc %rsi,%rcx
# qhasm: carry? t72 += squarezero + carry
# asm 1: adc <squarezero=int64#2,<t72=int64#5
# asm 2: adc <squarezero=%rsi,<t72=%r8
adc %rsi,%r8
# qhasm: carry? t73 += squarezero + carry
# asm 1: adc <squarezero=int64#2,<t73=int64#6
# asm 2: adc <squarezero=%rsi,<t73=%r9
adc %rsi,%r9
# qhasm: squarezero += squarezero + carry
# asm 1: adc <squarezero=int64#2,<squarezero=int64#2
# asm 2: adc <squarezero=%rsi,<squarezero=%rsi
adc %rsi,%rsi
# qhasm: squarezero *= 38
# asm 1: imulq $38,<squarezero=int64#2,>squarezero=int64#2
# asm 2: imulq $38,<squarezero=%rsi,>squarezero=%rsi
imulq $38,%rsi,%rsi
# qhasm: t70 += squarezero
# asm 1: add <squarezero=int64#2,<t70=int64#11
# asm 2: add <squarezero=%rsi,<t70=%r13
add %rsi,%r13
# qhasm: t70_stack = t70
# asm 1: movq <t70=int64#11,>t70_stack=stack64#16
# asm 2: movq <t70=%r13,>t70_stack=120(%rsp)
movq %r13,120(%rsp)
# qhasm: t71_stack = t71
# asm 1: movq <t71=int64#4,>t71_stack=stack64#17
# asm 2: movq <t71=%rcx,>t71_stack=128(%rsp)
movq %rcx,128(%rsp)
# qhasm: t72_stack = t72
# asm 1: movq <t72=int64#5,>t72_stack=stack64#18
# asm 2: movq <t72=%r8,>t72_stack=136(%rsp)
movq %r8,136(%rsp)
# qhasm: t73_stack = t73
# asm 1: movq <t73=int64#6,>t73_stack=stack64#19
# asm 2: movq <t73=%r9,>t73_stack=144(%rsp)
movq %r9,144(%rsp)
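# note: the same schoolbook squaring is now repeated on the sum held at
# 56(%rsp)..80(%rsp) (t10..t13), producing t60..t63.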
  728. # qhasm: squarer7 = 0
  729. # asm 1: mov $0,>squarer7=int64#2
  730. # asm 2: mov $0,>squarer7=%rsi
  731. mov $0,%rsi
  732. # qhasm: squarerax = t11_stack
  733. # asm 1: movq <t11_stack=stack64#9,>squarerax=int64#7
  734. # asm 2: movq <t11_stack=64(%rsp),>squarerax=%rax
  735. movq 64(%rsp),%rax
  736. # qhasm: (uint128) squarerdx squarerax = squarerax * t10_stack
  737. # asm 1: mulq <t10_stack=stack64#8
  738. # asm 2: mulq <t10_stack=56(%rsp)
  739. mulq 56(%rsp)
  740. # qhasm: t61 = squarerax
  741. # asm 1: mov <squarerax=int64#7,>t61=int64#4
  742. # asm 2: mov <squarerax=%rax,>t61=%rcx
  743. mov %rax,%rcx
  744. # qhasm: t62 = squarerdx
  745. # asm 1: mov <squarerdx=int64#3,>t62=int64#5
  746. # asm 2: mov <squarerdx=%rdx,>t62=%r8
  747. mov %rdx,%r8
  748. # qhasm: squarerax = t12_stack
  749. # asm 1: movq <t12_stack=stack64#10,>squarerax=int64#7
  750. # asm 2: movq <t12_stack=72(%rsp),>squarerax=%rax
  751. movq 72(%rsp),%rax
  752. # qhasm: (uint128) squarerdx squarerax = squarerax * t11_stack
  753. # asm 1: mulq <t11_stack=stack64#9
  754. # asm 2: mulq <t11_stack=64(%rsp)
  755. mulq 64(%rsp)
  756. # qhasm: t63 = squarerax
  757. # asm 1: mov <squarerax=int64#7,>t63=int64#6
  758. # asm 2: mov <squarerax=%rax,>t63=%r9
  759. mov %rax,%r9
  760. # qhasm: squarer4 = squarerdx
  761. # asm 1: mov <squarerdx=int64#3,>squarer4=int64#8
  762. # asm 2: mov <squarerdx=%rdx,>squarer4=%r10
  763. mov %rdx,%r10
  764. # qhasm: squarerax = t13_stack
  765. # asm 1: movq <t13_stack=stack64#11,>squarerax=int64#7
  766. # asm 2: movq <t13_stack=80(%rsp),>squarerax=%rax
  767. movq 80(%rsp),%rax
  768. # qhasm: (uint128) squarerdx squarerax = squarerax * t12_stack
  769. # asm 1: mulq <t12_stack=stack64#10
  770. # asm 2: mulq <t12_stack=72(%rsp)
  771. mulq 72(%rsp)
  772. # qhasm: squarer5 = squarerax
  773. # asm 1: mov <squarerax=int64#7,>squarer5=int64#9
  774. # asm 2: mov <squarerax=%rax,>squarer5=%r11
  775. mov %rax,%r11
  776. # qhasm: squarer6 = squarerdx
  777. # asm 1: mov <squarerdx=int64#3,>squarer6=int64#10
  778. # asm 2: mov <squarerdx=%rdx,>squarer6=%r12
  779. mov %rdx,%r12
  780. # qhasm: squarerax = t12_stack
  781. # asm 1: movq <t12_stack=stack64#10,>squarerax=int64#7
  782. # asm 2: movq <t12_stack=72(%rsp),>squarerax=%rax
  783. movq 72(%rsp),%rax
  784. # qhasm: (uint128) squarerdx squarerax = squarerax * t10_stack
  785. # asm 1: mulq <t10_stack=stack64#8
  786. # asm 2: mulq <t10_stack=56(%rsp)
  787. mulq 56(%rsp)
  788. # qhasm: carry? t62 += squarerax
  789. # asm 1: add <squarerax=int64#7,<t62=int64#5
  790. # asm 2: add <squarerax=%rax,<t62=%r8
  791. add %rax,%r8
  792. # qhasm: carry? t63 += squarerdx + carry
  793. # asm 1: adc <squarerdx=int64#3,<t63=int64#6
  794. # asm 2: adc <squarerdx=%rdx,<t63=%r9
  795. adc %rdx,%r9
  796. # qhasm: squarer4 += 0 + carry
  797. # asm 1: adc $0,<squarer4=int64#8
  798. # asm 2: adc $0,<squarer4=%r10
  799. adc $0,%r10
  800. # qhasm: squarerax = t13_stack
  801. # asm 1: movq <t13_stack=stack64#11,>squarerax=int64#7
  802. # asm 2: movq <t13_stack=80(%rsp),>squarerax=%rax
  803. movq 80(%rsp),%rax
  804. # qhasm: (uint128) squarerdx squarerax = squarerax * t11_stack
  805. # asm 1: mulq <t11_stack=stack64#9
  806. # asm 2: mulq <t11_stack=64(%rsp)
  807. mulq 64(%rsp)
  808. # qhasm: carry? squarer4 += squarerax
  809. # asm 1: add <squarerax=int64#7,<squarer4=int64#8
  810. # asm 2: add <squarerax=%rax,<squarer4=%r10
  811. add %rax,%r10
  812. # qhasm: carry? squarer5 += squarerdx + carry
  813. # asm 1: adc <squarerdx=int64#3,<squarer5=int64#9
  814. # asm 2: adc <squarerdx=%rdx,<squarer5=%r11
  815. adc %rdx,%r11
  816. # qhasm: squarer6 += 0 + carry
  817. # asm 1: adc $0,<squarer6=int64#10
  818. # asm 2: adc $0,<squarer6=%r12
  819. adc $0,%r12
  820. # qhasm: squarerax = t13_stack
  821. # asm 1: movq <t13_stack=stack64#11,>squarerax=int64#7
  822. # asm 2: movq <t13_stack=80(%rsp),>squarerax=%rax
  823. movq 80(%rsp),%rax
  824. # qhasm: (uint128) squarerdx squarerax = squarerax * t10_stack
  825. # asm 1: mulq <t10_stack=stack64#8
  826. # asm 2: mulq <t10_stack=56(%rsp)
  827. mulq 56(%rsp)
  828. # qhasm: carry? t63 += squarerax
  829. # asm 1: add <squarerax=int64#7,<t63=int64#6
  830. # asm 2: add <squarerax=%rax,<t63=%r9
  831. add %rax,%r9
  832. # qhasm: carry? squarer4 += squarerdx + carry
  833. # asm 1: adc <squarerdx=int64#3,<squarer4=int64#8
  834. # asm 2: adc <squarerdx=%rdx,<squarer4=%r10
  835. adc %rdx,%r10
  836. # qhasm: carry? squarer5 += 0 + carry
  837. # asm 1: adc $0,<squarer5=int64#9
  838. # asm 2: adc $0,<squarer5=%r11
  839. adc $0,%r11
  840. # qhasm: carry? squarer6 += 0 + carry
  841. # asm 1: adc $0,<squarer6=int64#10
  842. # asm 2: adc $0,<squarer6=%r12
  843. adc $0,%r12
  844. # qhasm: squarer7 += 0 + carry
  845. # asm 1: adc $0,<squarer7=int64#2
  846. # asm 2: adc $0,<squarer7=%rsi
  847. adc $0,%rsi
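# note: the off-diagonal products accumulated above are now doubled, since each
# t1[i]*t1[j] with i != j appears twice in the square.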
  848. # qhasm: carry? t61 += t61
  849. # asm 1: add <t61=int64#4,<t61=int64#4
  850. # asm 2: add <t61=%rcx,<t61=%rcx
  851. add %rcx,%rcx
  852. # qhasm: carry? t62 += t62 + carry
  853. # asm 1: adc <t62=int64#5,<t62=int64#5
  854. # asm 2: adc <t62=%r8,<t62=%r8
  855. adc %r8,%r8
  856. # qhasm: carry? t63 += t63 + carry
  857. # asm 1: adc <t63=int64#6,<t63=int64#6
  858. # asm 2: adc <t63=%r9,<t63=%r9
  859. adc %r9,%r9
  860. # qhasm: carry? squarer4 += squarer4 + carry
  861. # asm 1: adc <squarer4=int64#8,<squarer4=int64#8
  862. # asm 2: adc <squarer4=%r10,<squarer4=%r10
  863. adc %r10,%r10
  864. # qhasm: carry? squarer5 += squarer5 + carry
  865. # asm 1: adc <squarer5=int64#9,<squarer5=int64#9
  866. # asm 2: adc <squarer5=%r11,<squarer5=%r11
  867. adc %r11,%r11
  868. # qhasm: carry? squarer6 += squarer6 + carry
  869. # asm 1: adc <squarer6=int64#10,<squarer6=int64#10
  870. # asm 2: adc <squarer6=%r12,<squarer6=%r12
  871. adc %r12,%r12
  872. # qhasm: squarer7 += squarer7 + carry
  873. # asm 1: adc <squarer7=int64#2,<squarer7=int64#2
  874. # asm 2: adc <squarer7=%rsi,<squarer7=%rsi
  875. adc %rsi,%rsi
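# note: the diagonal squares t10^2, t11^2, t12^2, t13^2 are computed next and
# added into the doubled cross products, completing the raw 512-bit square in
# t60..t63, squarer4..squarer7.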
  876. # qhasm: squarerax = t10_stack
  877. # asm 1: movq <t10_stack=stack64#8,>squarerax=int64#7
  878. # asm 2: movq <t10_stack=56(%rsp),>squarerax=%rax
  879. movq 56(%rsp),%rax
  880. # qhasm: (uint128) squarerdx squarerax = squarerax * t10_stack
  881. # asm 1: mulq <t10_stack=stack64#8
  882. # asm 2: mulq <t10_stack=56(%rsp)
  883. mulq 56(%rsp)
  884. # qhasm: t60 = squarerax
  885. # asm 1: mov <squarerax=int64#7,>t60=int64#11
  886. # asm 2: mov <squarerax=%rax,>t60=%r13
  887. mov %rax,%r13
  888. # qhasm: squaret1 = squarerdx
  889. # asm 1: mov <squarerdx=int64#3,>squaret1=int64#12
  890. # asm 2: mov <squarerdx=%rdx,>squaret1=%r14
  891. mov %rdx,%r14
  892. # qhasm: squarerax = t11_stack
  893. # asm 1: movq <t11_stack=stack64#9,>squarerax=int64#7
  894. # asm 2: movq <t11_stack=64(%rsp),>squarerax=%rax
  895. movq 64(%rsp),%rax
  896. # qhasm: (uint128) squarerdx squarerax = squarerax * t11_stack
  897. # asm 1: mulq <t11_stack=stack64#9
  898. # asm 2: mulq <t11_stack=64(%rsp)
  899. mulq 64(%rsp)
  900. # qhasm: squaret2 = squarerax
  901. # asm 1: mov <squarerax=int64#7,>squaret2=int64#13
  902. # asm 2: mov <squarerax=%rax,>squaret2=%r15
  903. mov %rax,%r15
  904. # qhasm: squaret3 = squarerdx
  905. # asm 1: mov <squarerdx=int64#3,>squaret3=int64#14
  906. # asm 2: mov <squarerdx=%rdx,>squaret3=%rbx
  907. mov %rdx,%rbx
  908. # qhasm: squarerax = t12_stack
  909. # asm 1: movq <t12_stack=stack64#10,>squarerax=int64#7
  910. # asm 2: movq <t12_stack=72(%rsp),>squarerax=%rax
  911. movq 72(%rsp),%rax
  912. # qhasm: (uint128) squarerdx squarerax = squarerax * t12_stack
  913. # asm 1: mulq <t12_stack=stack64#10
  914. # asm 2: mulq <t12_stack=72(%rsp)
  915. mulq 72(%rsp)
  916. # qhasm: carry? t61 += squaret1
  917. # asm 1: add <squaret1=int64#12,<t61=int64#4
  918. # asm 2: add <squaret1=%r14,<t61=%rcx
  919. add %r14,%rcx
  920. # qhasm: carry? t62 += squaret2 + carry
  921. # asm 1: adc <squaret2=int64#13,<t62=int64#5
  922. # asm 2: adc <squaret2=%r15,<t62=%r8
  923. adc %r15,%r8
  924. # qhasm: carry? t63 += squaret3 + carry
  925. # asm 1: adc <squaret3=int64#14,<t63=int64#6
  926. # asm 2: adc <squaret3=%rbx,<t63=%r9
  927. adc %rbx,%r9
  928. # qhasm: carry? squarer4 += squarerax + carry
  929. # asm 1: adc <squarerax=int64#7,<squarer4=int64#8
  930. # asm 2: adc <squarerax=%rax,<squarer4=%r10
  931. adc %rax,%r10
  932. # qhasm: carry? squarer5 += squarerdx + carry
  933. # asm 1: adc <squarerdx=int64#3,<squarer5=int64#9
  934. # asm 2: adc <squarerdx=%rdx,<squarer5=%r11
  935. adc %rdx,%r11
  936. # qhasm: carry? squarer6 += 0 + carry
  937. # asm 1: adc $0,<squarer6=int64#10
  938. # asm 2: adc $0,<squarer6=%r12
  939. adc $0,%r12
  940. # qhasm: squarer7 += 0 + carry
  941. # asm 1: adc $0,<squarer7=int64#2
  942. # asm 2: adc $0,<squarer7=%rsi
  943. adc $0,%rsi
  944. # qhasm: squarerax = t13_stack
  945. # asm 1: movq <t13_stack=stack64#11,>squarerax=int64#7
  946. # asm 2: movq <t13_stack=80(%rsp),>squarerax=%rax
  947. movq 80(%rsp),%rax
  948. # qhasm: (uint128) squarerdx squarerax = squarerax * t13_stack
  949. # asm 1: mulq <t13_stack=stack64#11
  950. # asm 2: mulq <t13_stack=80(%rsp)
  951. mulq 80(%rsp)
  952. # qhasm: carry? squarer6 += squarerax
  953. # asm 1: add <squarerax=int64#7,<squarer6=int64#10
  954. # asm 2: add <squarerax=%rax,<squarer6=%r12
  955. add %rax,%r12
  956. # qhasm: squarer7 += squarerdx + carry
  957. # asm 1: adc <squarerdx=int64#3,<squarer7=int64#2
  958. # asm 2: adc <squarerdx=%rdx,<squarer7=%rsi
  959. adc %rdx,%rsi
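# note: reduction of the 512-bit square.  squarer4..squarer7 are each multiplied
# by the 64-bit constant 38 (crypto_scalarmult_curve25519_amd64_64_38), the high
# halves chained upward, and the results added into t60..t63; the overflow limb
# squarer8 is multiplied by 38 and folded into t60, and a last possible carry is
# folded the same way.  This uses 2^256 = 38 (mod 2^255-19), so t6 ends up as
# four 64-bit limbs congruent to t1^2 but not necessarily fully reduced.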
  960. # qhasm: squarerax = squarer4
  961. # asm 1: mov <squarer4=int64#8,>squarerax=int64#7
  962. # asm 2: mov <squarer4=%r10,>squarerax=%rax
  963. mov %r10,%rax
  964. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  965. mulq crypto_scalarmult_curve25519_amd64_64_38
  966. # qhasm: squarer4 = squarerax
  967. # asm 1: mov <squarerax=int64#7,>squarer4=int64#8
  968. # asm 2: mov <squarerax=%rax,>squarer4=%r10
  969. mov %rax,%r10
  970. # qhasm: squarerax = squarer5
  971. # asm 1: mov <squarer5=int64#9,>squarerax=int64#7
  972. # asm 2: mov <squarer5=%r11,>squarerax=%rax
  973. mov %r11,%rax
  974. # qhasm: squarer5 = squarerdx
  975. # asm 1: mov <squarerdx=int64#3,>squarer5=int64#9
  976. # asm 2: mov <squarerdx=%rdx,>squarer5=%r11
  977. mov %rdx,%r11
  978. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  979. mulq crypto_scalarmult_curve25519_amd64_64_38
  980. # qhasm: carry? squarer5 += squarerax
  981. # asm 1: add <squarerax=int64#7,<squarer5=int64#9
  982. # asm 2: add <squarerax=%rax,<squarer5=%r11
  983. add %rax,%r11
  984. # qhasm: squarerax = squarer6
  985. # asm 1: mov <squarer6=int64#10,>squarerax=int64#7
  986. # asm 2: mov <squarer6=%r12,>squarerax=%rax
  987. mov %r12,%rax
  988. # qhasm: squarer6 = 0
  989. # asm 1: mov $0,>squarer6=int64#10
  990. # asm 2: mov $0,>squarer6=%r12
  991. mov $0,%r12
  992. # qhasm: squarer6 += squarerdx + carry
  993. # asm 1: adc <squarerdx=int64#3,<squarer6=int64#10
  994. # asm 2: adc <squarerdx=%rdx,<squarer6=%r12
  995. adc %rdx,%r12
  996. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  997. mulq crypto_scalarmult_curve25519_amd64_64_38
  998. # qhasm: carry? squarer6 += squarerax
  999. # asm 1: add <squarerax=int64#7,<squarer6=int64#10
  1000. # asm 2: add <squarerax=%rax,<squarer6=%r12
  1001. add %rax,%r12
  1002. # qhasm: squarerax = squarer7
  1003. # asm 1: mov <squarer7=int64#2,>squarerax=int64#7
  1004. # asm 2: mov <squarer7=%rsi,>squarerax=%rax
  1005. mov %rsi,%rax
  1006. # qhasm: squarer7 = 0
  1007. # asm 1: mov $0,>squarer7=int64#2
  1008. # asm 2: mov $0,>squarer7=%rsi
  1009. mov $0,%rsi
  1010. # qhasm: squarer7 += squarerdx + carry
  1011. # asm 1: adc <squarerdx=int64#3,<squarer7=int64#2
  1012. # asm 2: adc <squarerdx=%rdx,<squarer7=%rsi
  1013. adc %rdx,%rsi
  1014. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  1015. mulq crypto_scalarmult_curve25519_amd64_64_38
  1016. # qhasm: carry? squarer7 += squarerax
  1017. # asm 1: add <squarerax=int64#7,<squarer7=int64#2
  1018. # asm 2: add <squarerax=%rax,<squarer7=%rsi
  1019. add %rax,%rsi
  1020. # qhasm: squarer8 = 0
  1021. # asm 1: mov $0,>squarer8=int64#7
  1022. # asm 2: mov $0,>squarer8=%rax
  1023. mov $0,%rax
  1024. # qhasm: squarer8 += squarerdx + carry
  1025. # asm 1: adc <squarerdx=int64#3,<squarer8=int64#7
  1026. # asm 2: adc <squarerdx=%rdx,<squarer8=%rax
  1027. adc %rdx,%rax
  1028. # qhasm: carry? t60 += squarer4
  1029. # asm 1: add <squarer4=int64#8,<t60=int64#11
  1030. # asm 2: add <squarer4=%r10,<t60=%r13
  1031. add %r10,%r13
  1032. # qhasm: carry? t61 += squarer5 + carry
  1033. # asm 1: adc <squarer5=int64#9,<t61=int64#4
  1034. # asm 2: adc <squarer5=%r11,<t61=%rcx
  1035. adc %r11,%rcx
  1036. # qhasm: carry? t62 += squarer6 + carry
  1037. # asm 1: adc <squarer6=int64#10,<t62=int64#5
  1038. # asm 2: adc <squarer6=%r12,<t62=%r8
  1039. adc %r12,%r8
  1040. # qhasm: carry? t63 += squarer7 + carry
  1041. # asm 1: adc <squarer7=int64#2,<t63=int64#6
  1042. # asm 2: adc <squarer7=%rsi,<t63=%r9
  1043. adc %rsi,%r9
  1044. # qhasm: squarezero = 0
  1045. # asm 1: mov $0,>squarezero=int64#2
  1046. # asm 2: mov $0,>squarezero=%rsi
  1047. mov $0,%rsi
  1048. # qhasm: squarer8 += squarezero + carry
  1049. # asm 1: adc <squarezero=int64#2,<squarer8=int64#7
  1050. # asm 2: adc <squarezero=%rsi,<squarer8=%rax
  1051. adc %rsi,%rax
  1052. # qhasm: squarer8 *= 38
  1053. # asm 1: imulq $38,<squarer8=int64#7,>squarer8=int64#3
  1054. # asm 2: imulq $38,<squarer8=%rax,>squarer8=%rdx
  1055. imulq $38,%rax,%rdx
  1056. # qhasm: carry? t60 += squarer8
  1057. # asm 1: add <squarer8=int64#3,<t60=int64#11
  1058. # asm 2: add <squarer8=%rdx,<t60=%r13
  1059. add %rdx,%r13
  1060. # qhasm: carry? t61 += squarezero + carry
  1061. # asm 1: adc <squarezero=int64#2,<t61=int64#4
  1062. # asm 2: adc <squarezero=%rsi,<t61=%rcx
  1063. adc %rsi,%rcx
  1064. # qhasm: carry? t62 += squarezero + carry
  1065. # asm 1: adc <squarezero=int64#2,<t62=int64#5
  1066. # asm 2: adc <squarezero=%rsi,<t62=%r8
  1067. adc %rsi,%r8
  1068. # qhasm: carry? t63 += squarezero + carry
  1069. # asm 1: adc <squarezero=int64#2,<t63=int64#6
  1070. # asm 2: adc <squarezero=%rsi,<t63=%r9
  1071. adc %rsi,%r9
  1072. # qhasm: squarezero += squarezero + carry
  1073. # asm 1: adc <squarezero=int64#2,<squarezero=int64#2
  1074. # asm 2: adc <squarezero=%rsi,<squarezero=%rsi
  1075. adc %rsi,%rsi
  1076. # qhasm: squarezero *= 38
  1077. # asm 1: imulq $38,<squarezero=int64#2,>squarezero=int64#2
  1078. # asm 2: imulq $38,<squarezero=%rsi,>squarezero=%rsi
  1079. imulq $38,%rsi,%rsi
  1080. # qhasm: t60 += squarezero
  1081. # asm 1: add <squarezero=int64#2,<t60=int64#11
  1082. # asm 2: add <squarezero=%rsi,<t60=%r13
  1083. add %rsi,%r13
  1084. # qhasm: t60_stack = t60
  1085. # asm 1: movq <t60=int64#11,>t60_stack=stack64#20
  1086. # asm 2: movq <t60=%r13,>t60_stack=152(%rsp)
  1087. movq %r13,152(%rsp)
  1088. # qhasm: t61_stack = t61
  1089. # asm 1: movq <t61=int64#4,>t61_stack=stack64#21
  1090. # asm 2: movq <t61=%rcx,>t61_stack=160(%rsp)
  1091. movq %rcx,160(%rsp)
  1092. # qhasm: t62_stack = t62
  1093. # asm 1: movq <t62=int64#5,>t62_stack=stack64#22
  1094. # asm 2: movq <t62=%r8,>t62_stack=168(%rsp)
  1095. movq %r8,168(%rsp)
  1096. # qhasm: t63_stack = t63
  1097. # asm 1: movq <t63=int64#6,>t63_stack=stack64#23
  1098. # asm 2: movq <t63=%r9,>t63_stack=176(%rsp)
  1099. movq %r9,176(%rsp)
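# note: t6 = t1^2 is now saved in t60_stack..t63_stack.  The block below computes
# t5 = t6 - t7 mod 2^255-19: a 256-bit subtract with borrow, then, if it
# borrowed, a further subtraction of 38 (again 2^256 = 38 mod 2^255-19), and one
# more conditional subtraction of 38 from the low limb to absorb a borrow out of
# that correction.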
  1100. # qhasm: t50 = t60
  1101. # asm 1: mov <t60=int64#11,>t50=int64#2
  1102. # asm 2: mov <t60=%r13,>t50=%rsi
  1103. mov %r13,%rsi
  1104. # qhasm: t51 = t61
  1105. # asm 1: mov <t61=int64#4,>t51=int64#3
  1106. # asm 2: mov <t61=%rcx,>t51=%rdx
  1107. mov %rcx,%rdx
  1108. # qhasm: t52 = t62
  1109. # asm 1: mov <t62=int64#5,>t52=int64#4
  1110. # asm 2: mov <t62=%r8,>t52=%rcx
  1111. mov %r8,%rcx
  1112. # qhasm: t53 = t63
  1113. # asm 1: mov <t63=int64#6,>t53=int64#5
  1114. # asm 2: mov <t63=%r9,>t53=%r8
  1115. mov %r9,%r8
  1116. # qhasm: carry? t50 -= t70_stack
  1117. # asm 1: subq <t70_stack=stack64#16,<t50=int64#2
  1118. # asm 2: subq <t70_stack=120(%rsp),<t50=%rsi
  1119. subq 120(%rsp),%rsi
  1120. # qhasm: carry? t51 -= t71_stack - carry
  1121. # asm 1: sbbq <t71_stack=stack64#17,<t51=int64#3
  1122. # asm 2: sbbq <t71_stack=128(%rsp),<t51=%rdx
  1123. sbbq 128(%rsp),%rdx
  1124. # qhasm: carry? t52 -= t72_stack - carry
  1125. # asm 1: sbbq <t72_stack=stack64#18,<t52=int64#4
  1126. # asm 2: sbbq <t72_stack=136(%rsp),<t52=%rcx
  1127. sbbq 136(%rsp),%rcx
  1128. # qhasm: carry? t53 -= t73_stack - carry
  1129. # asm 1: sbbq <t73_stack=stack64#19,<t53=int64#5
  1130. # asm 2: sbbq <t73_stack=144(%rsp),<t53=%r8
  1131. sbbq 144(%rsp),%r8
  1132. # qhasm: subt0 = 0
  1133. # asm 1: mov $0,>subt0=int64#6
  1134. # asm 2: mov $0,>subt0=%r9
  1135. mov $0,%r9
  1136. # qhasm: subt1 = 38
  1137. # asm 1: mov $38,>subt1=int64#7
  1138. # asm 2: mov $38,>subt1=%rax
  1139. mov $38,%rax
  1140. # qhasm: subt1 = subt0 if !carry
  1141. # asm 1: cmovae <subt0=int64#6,<subt1=int64#7
  1142. # asm 2: cmovae <subt0=%r9,<subt1=%rax
  1143. cmovae %r9,%rax
  1144. # qhasm: carry? t50 -= subt1
  1145. # asm 1: sub <subt1=int64#7,<t50=int64#2
  1146. # asm 2: sub <subt1=%rax,<t50=%rsi
  1147. sub %rax,%rsi
  1148. # qhasm: carry? t51 -= subt0 - carry
  1149. # asm 1: sbb <subt0=int64#6,<t51=int64#3
  1150. # asm 2: sbb <subt0=%r9,<t51=%rdx
  1151. sbb %r9,%rdx
  1152. # qhasm: carry? t52 -= subt0 - carry
  1153. # asm 1: sbb <subt0=int64#6,<t52=int64#4
  1154. # asm 2: sbb <subt0=%r9,<t52=%rcx
  1155. sbb %r9,%rcx
  1156. # qhasm: carry? t53 -= subt0 - carry
  1157. # asm 1: sbb <subt0=int64#6,<t53=int64#5
  1158. # asm 2: sbb <subt0=%r9,<t53=%r8
  1159. sbb %r9,%r8
  1160. # qhasm: subt0 = subt1 if carry
  1161. # asm 1: cmovc <subt1=int64#7,<subt0=int64#6
  1162. # asm 2: cmovc <subt1=%rax,<subt0=%r9
  1163. cmovc %rax,%r9
  1164. # qhasm: t50 -= subt0
  1165. # asm 1: sub <subt0=int64#6,<t50=int64#2
  1166. # asm 2: sub <subt0=%r9,<t50=%rsi
  1167. sub %r9,%rsi
  1168. # qhasm: t50_stack = t50
  1169. # asm 1: movq <t50=int64#2,>t50_stack=stack64#24
  1170. # asm 2: movq <t50=%rsi,>t50_stack=184(%rsp)
  1171. movq %rsi,184(%rsp)
  1172. # qhasm: t51_stack = t51
  1173. # asm 1: movq <t51=int64#3,>t51_stack=stack64#25
  1174. # asm 2: movq <t51=%rdx,>t51_stack=192(%rsp)
  1175. movq %rdx,192(%rsp)
  1176. # qhasm: t52_stack = t52
  1177. # asm 1: movq <t52=int64#4,>t52_stack=stack64#26
  1178. # asm 2: movq <t52=%rcx,>t52_stack=200(%rsp)
  1179. movq %rcx,200(%rsp)
  1180. # qhasm: t53_stack = t53
  1181. # asm 1: movq <t53=int64#5,>t53_stack=stack64#27
  1182. # asm 2: movq <t53=%r8,>t53_stack=208(%rsp)
  1183. movq %r8,208(%rsp)
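# note: next, the four limbs at workp+96..120 are loaded into t30..t33 (and
# copied to t40..t43), and t3 = t30..t33 + workp[128..152] is formed with
# add/adc; a carry out of the top limb is folded back as +38, with a second
# conditional +38 on the low limb absorbing a carry from that correction.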
  1184. # qhasm: t30 = *(uint64 *)(workp + 96)
  1185. # asm 1: movq 96(<workp=int64#1),>t30=int64#2
  1186. # asm 2: movq 96(<workp=%rdi),>t30=%rsi
  1187. movq 96(%rdi),%rsi
  1188. # qhasm: t31 = *(uint64 *)(workp + 104)
  1189. # asm 1: movq 104(<workp=int64#1),>t31=int64#3
  1190. # asm 2: movq 104(<workp=%rdi),>t31=%rdx
  1191. movq 104(%rdi),%rdx
  1192. # qhasm: t32 = *(uint64 *)(workp + 112)
  1193. # asm 1: movq 112(<workp=int64#1),>t32=int64#4
  1194. # asm 2: movq 112(<workp=%rdi),>t32=%rcx
  1195. movq 112(%rdi),%rcx
  1196. # qhasm: t33 = *(uint64 *)(workp + 120)
  1197. # asm 1: movq 120(<workp=int64#1),>t33=int64#5
  1198. # asm 2: movq 120(<workp=%rdi),>t33=%r8
  1199. movq 120(%rdi),%r8
  1200. # qhasm: t40 = t30
  1201. # asm 1: mov <t30=int64#2,>t40=int64#6
  1202. # asm 2: mov <t30=%rsi,>t40=%r9
  1203. mov %rsi,%r9
  1204. # qhasm: t41 = t31
  1205. # asm 1: mov <t31=int64#3,>t41=int64#7
  1206. # asm 2: mov <t31=%rdx,>t41=%rax
  1207. mov %rdx,%rax
  1208. # qhasm: t42 = t32
  1209. # asm 1: mov <t32=int64#4,>t42=int64#8
  1210. # asm 2: mov <t32=%rcx,>t42=%r10
  1211. mov %rcx,%r10
  1212. # qhasm: t43 = t33
  1213. # asm 1: mov <t33=int64#5,>t43=int64#9
  1214. # asm 2: mov <t33=%r8,>t43=%r11
  1215. mov %r8,%r11
  1216. # qhasm: carry? t30 += *(uint64 *)(workp + 128)
  1217. # asm 1: addq 128(<workp=int64#1),<t30=int64#2
  1218. # asm 2: addq 128(<workp=%rdi),<t30=%rsi
  1219. addq 128(%rdi),%rsi
  1220. # qhasm: carry? t31 += *(uint64 *)(workp + 136) + carry
  1221. # asm 1: adcq 136(<workp=int64#1),<t31=int64#3
  1222. # asm 2: adcq 136(<workp=%rdi),<t31=%rdx
  1223. adcq 136(%rdi),%rdx
  1224. # qhasm: carry? t32 += *(uint64 *)(workp + 144) + carry
  1225. # asm 1: adcq 144(<workp=int64#1),<t32=int64#4
  1226. # asm 2: adcq 144(<workp=%rdi),<t32=%rcx
  1227. adcq 144(%rdi),%rcx
  1228. # qhasm: carry? t33 += *(uint64 *)(workp + 152) + carry
  1229. # asm 1: adcq 152(<workp=int64#1),<t33=int64#5
  1230. # asm 2: adcq 152(<workp=%rdi),<t33=%r8
  1231. adcq 152(%rdi),%r8
  1232. # qhasm: addt0 = 0
  1233. # asm 1: mov $0,>addt0=int64#10
  1234. # asm 2: mov $0,>addt0=%r12
  1235. mov $0,%r12
  1236. # qhasm: addt1 = 38
  1237. # asm 1: mov $38,>addt1=int64#11
  1238. # asm 2: mov $38,>addt1=%r13
  1239. mov $38,%r13
  1240. # qhasm: addt1 = addt0 if !carry
  1241. # asm 1: cmovae <addt0=int64#10,<addt1=int64#11
  1242. # asm 2: cmovae <addt0=%r12,<addt1=%r13
  1243. cmovae %r12,%r13
  1244. # qhasm: carry? t30 += addt1
  1245. # asm 1: add <addt1=int64#11,<t30=int64#2
  1246. # asm 2: add <addt1=%r13,<t30=%rsi
  1247. add %r13,%rsi
  1248. # qhasm: carry? t31 += addt0 + carry
  1249. # asm 1: adc <addt0=int64#10,<t31=int64#3
  1250. # asm 2: adc <addt0=%r12,<t31=%rdx
  1251. adc %r12,%rdx
  1252. # qhasm: carry? t32 += addt0 + carry
  1253. # asm 1: adc <addt0=int64#10,<t32=int64#4
  1254. # asm 2: adc <addt0=%r12,<t32=%rcx
  1255. adc %r12,%rcx
  1256. # qhasm: carry? t33 += addt0 + carry
  1257. # asm 1: adc <addt0=int64#10,<t33=int64#5
  1258. # asm 2: adc <addt0=%r12,<t33=%r8
  1259. adc %r12,%r8
  1260. # qhasm: addt0 = addt1 if carry
  1261. # asm 1: cmovc <addt1=int64#11,<addt0=int64#10
  1262. # asm 2: cmovc <addt1=%r13,<addt0=%r12
  1263. cmovc %r13,%r12
  1264. # qhasm: t30 += addt0
  1265. # asm 1: add <addt0=int64#10,<t30=int64#2
  1266. # asm 2: add <addt0=%r12,<t30=%rsi
  1267. add %r12,%rsi
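# note: t4 = t40..t43 - workp[128..152] follows, using the mirrored pattern:
# sub/sbb, then a conditional -38 on borrow and a final conditional -38 on the
# low limb.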
  1268. # qhasm: carry? t40 -= *(uint64 *)(workp + 128)
  1269. # asm 1: subq 128(<workp=int64#1),<t40=int64#6
  1270. # asm 2: subq 128(<workp=%rdi),<t40=%r9
  1271. subq 128(%rdi),%r9
  1272. # qhasm: carry? t41 -= *(uint64 *)(workp + 136) - carry
  1273. # asm 1: sbbq 136(<workp=int64#1),<t41=int64#7
  1274. # asm 2: sbbq 136(<workp=%rdi),<t41=%rax
  1275. sbbq 136(%rdi),%rax
  1276. # qhasm: carry? t42 -= *(uint64 *)(workp + 144) - carry
  1277. # asm 1: sbbq 144(<workp=int64#1),<t42=int64#8
  1278. # asm 2: sbbq 144(<workp=%rdi),<t42=%r10
  1279. sbbq 144(%rdi),%r10
  1280. # qhasm: carry? t43 -= *(uint64 *)(workp + 152) - carry
  1281. # asm 1: sbbq 152(<workp=int64#1),<t43=int64#9
  1282. # asm 2: sbbq 152(<workp=%rdi),<t43=%r11
  1283. sbbq 152(%rdi),%r11
  1284. # qhasm: subt0 = 0
  1285. # asm 1: mov $0,>subt0=int64#10
  1286. # asm 2: mov $0,>subt0=%r12
  1287. mov $0,%r12
  1288. # qhasm: subt1 = 38
  1289. # asm 1: mov $38,>subt1=int64#11
  1290. # asm 2: mov $38,>subt1=%r13
  1291. mov $38,%r13
  1292. # qhasm: subt1 = subt0 if !carry
  1293. # asm 1: cmovae <subt0=int64#10,<subt1=int64#11
  1294. # asm 2: cmovae <subt0=%r12,<subt1=%r13
  1295. cmovae %r12,%r13
  1296. # qhasm: carry? t40 -= subt1
  1297. # asm 1: sub <subt1=int64#11,<t40=int64#6
  1298. # asm 2: sub <subt1=%r13,<t40=%r9
  1299. sub %r13,%r9
  1300. # qhasm: carry? t41 -= subt0 - carry
  1301. # asm 1: sbb <subt0=int64#10,<t41=int64#7
  1302. # asm 2: sbb <subt0=%r12,<t41=%rax
  1303. sbb %r12,%rax
  1304. # qhasm: carry? t42 -= subt0 - carry
  1305. # asm 1: sbb <subt0=int64#10,<t42=int64#8
  1306. # asm 2: sbb <subt0=%r12,<t42=%r10
  1307. sbb %r12,%r10
  1308. # qhasm: carry? t43 -= subt0 - carry
  1309. # asm 1: sbb <subt0=int64#10,<t43=int64#9
  1310. # asm 2: sbb <subt0=%r12,<t43=%r11
  1311. sbb %r12,%r11
  1312. # qhasm: subt0 = subt1 if carry
  1313. # asm 1: cmovc <subt1=int64#11,<subt0=int64#10
  1314. # asm 2: cmovc <subt1=%r13,<subt0=%r12
  1315. cmovc %r13,%r12
  1316. # qhasm: t40 -= subt0
  1317. # asm 1: sub <subt0=int64#10,<t40=int64#6
  1318. # asm 2: sub <subt0=%r12,<t40=%r9
  1319. sub %r12,%r9
  1320. # qhasm: t30_stack = t30
  1321. # asm 1: movq <t30=int64#2,>t30_stack=stack64#28
  1322. # asm 2: movq <t30=%rsi,>t30_stack=216(%rsp)
  1323. movq %rsi,216(%rsp)
  1324. # qhasm: t31_stack = t31
  1325. # asm 1: movq <t31=int64#3,>t31_stack=stack64#29
  1326. # asm 2: movq <t31=%rdx,>t31_stack=224(%rsp)
  1327. movq %rdx,224(%rsp)
  1328. # qhasm: t32_stack = t32
  1329. # asm 1: movq <t32=int64#4,>t32_stack=stack64#30
  1330. # asm 2: movq <t32=%rcx,>t32_stack=232(%rsp)
  1331. movq %rcx,232(%rsp)
  1332. # qhasm: t33_stack = t33
  1333. # asm 1: movq <t33=int64#5,>t33_stack=stack64#31
  1334. # asm 2: movq <t33=%r8,>t33_stack=240(%rsp)
  1335. movq %r8,240(%rsp)
  1336. # qhasm: t40_stack = t40
  1337. # asm 1: movq <t40=int64#6,>t40_stack=stack64#32
  1338. # asm 2: movq <t40=%r9,>t40_stack=248(%rsp)
  1339. movq %r9,248(%rsp)
  1340. # qhasm: t41_stack = t41
  1341. # asm 1: movq <t41=int64#7,>t41_stack=stack64#33
  1342. # asm 2: movq <t41=%rax,>t41_stack=256(%rsp)
  1343. movq %rax,256(%rsp)
  1344. # qhasm: t42_stack = t42
  1345. # asm 1: movq <t42=int64#8,>t42_stack=stack64#34
  1346. # asm 2: movq <t42=%r10,>t42_stack=264(%rsp)
  1347. movq %r10,264(%rsp)
  1348. # qhasm: t43_stack = t43
  1349. # asm 1: movq <t43=int64#9,>t43_stack=stack64#35
  1350. # asm 2: movq <t43=%r11,>t43_stack=272(%rsp)
  1351. movq %r11,272(%rsp)
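# note: t3 and t4 are now saved in t30_stack..t33_stack and t40_stack..t43_stack.
# The block below is a 4x4-limb schoolbook multiplication t9 = t3 * t2 (t2 held
# in t20_stack..t23_stack): each limb mulx0..mulx3 of t3 is multiplied by all
# four limbs of t2, the partial products are accumulated into t90..t93 and
# mulr4..mulr7, and the upper limbs are then reduced by 38 as in the squaring.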
  1352. # qhasm: mulr4 = 0
  1353. # asm 1: mov $0,>mulr4=int64#2
  1354. # asm 2: mov $0,>mulr4=%rsi
  1355. mov $0,%rsi
  1356. # qhasm: mulr5 = 0
  1357. # asm 1: mov $0,>mulr5=int64#4
  1358. # asm 2: mov $0,>mulr5=%rcx
  1359. mov $0,%rcx
  1360. # qhasm: mulr6 = 0
  1361. # asm 1: mov $0,>mulr6=int64#5
  1362. # asm 2: mov $0,>mulr6=%r8
  1363. mov $0,%r8
  1364. # qhasm: mulr7 = 0
  1365. # asm 1: mov $0,>mulr7=int64#6
  1366. # asm 2: mov $0,>mulr7=%r9
  1367. mov $0,%r9
  1368. # qhasm: mulx0 = t30_stack
  1369. # asm 1: movq <t30_stack=stack64#28,>mulx0=int64#8
  1370. # asm 2: movq <t30_stack=216(%rsp),>mulx0=%r10
  1371. movq 216(%rsp),%r10
  1372. # qhasm: mulrax = t20_stack
  1373. # asm 1: movq <t20_stack=stack64#12,>mulrax=int64#7
  1374. # asm 2: movq <t20_stack=88(%rsp),>mulrax=%rax
  1375. movq 88(%rsp),%rax
  1376. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  1377. # asm 1: mul <mulx0=int64#8
  1378. # asm 2: mul <mulx0=%r10
  1379. mul %r10
  1380. # qhasm: t90 = mulrax
  1381. # asm 1: mov <mulrax=int64#7,>t90=int64#9
  1382. # asm 2: mov <mulrax=%rax,>t90=%r11
  1383. mov %rax,%r11
  1384. # qhasm: t91 = mulrdx
  1385. # asm 1: mov <mulrdx=int64#3,>t91=int64#10
  1386. # asm 2: mov <mulrdx=%rdx,>t91=%r12
  1387. mov %rdx,%r12
  1388. # qhasm: mulrax = t21_stack
  1389. # asm 1: movq <t21_stack=stack64#13,>mulrax=int64#7
  1390. # asm 2: movq <t21_stack=96(%rsp),>mulrax=%rax
  1391. movq 96(%rsp),%rax
  1392. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  1393. # asm 1: mul <mulx0=int64#8
  1394. # asm 2: mul <mulx0=%r10
  1395. mul %r10
  1396. # qhasm: carry? t91 += mulrax
  1397. # asm 1: add <mulrax=int64#7,<t91=int64#10
  1398. # asm 2: add <mulrax=%rax,<t91=%r12
  1399. add %rax,%r12
  1400. # qhasm: t92 = 0
  1401. # asm 1: mov $0,>t92=int64#11
  1402. # asm 2: mov $0,>t92=%r13
  1403. mov $0,%r13
  1404. # qhasm: t92 += mulrdx + carry
  1405. # asm 1: adc <mulrdx=int64#3,<t92=int64#11
  1406. # asm 2: adc <mulrdx=%rdx,<t92=%r13
  1407. adc %rdx,%r13
  1408. # qhasm: mulrax = t22_stack
  1409. # asm 1: movq <t22_stack=stack64#14,>mulrax=int64#7
  1410. # asm 2: movq <t22_stack=104(%rsp),>mulrax=%rax
  1411. movq 104(%rsp),%rax
  1412. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  1413. # asm 1: mul <mulx0=int64#8
  1414. # asm 2: mul <mulx0=%r10
  1415. mul %r10
  1416. # qhasm: carry? t92 += mulrax
  1417. # asm 1: add <mulrax=int64#7,<t92=int64#11
  1418. # asm 2: add <mulrax=%rax,<t92=%r13
  1419. add %rax,%r13
  1420. # qhasm: t93 = 0
  1421. # asm 1: mov $0,>t93=int64#12
  1422. # asm 2: mov $0,>t93=%r14
  1423. mov $0,%r14
  1424. # qhasm: t93 += mulrdx + carry
  1425. # asm 1: adc <mulrdx=int64#3,<t93=int64#12
  1426. # asm 2: adc <mulrdx=%rdx,<t93=%r14
  1427. adc %rdx,%r14
  1428. # qhasm: mulrax = t23_stack
  1429. # asm 1: movq <t23_stack=stack64#15,>mulrax=int64#7
  1430. # asm 2: movq <t23_stack=112(%rsp),>mulrax=%rax
  1431. movq 112(%rsp),%rax
  1432. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  1433. # asm 1: mul <mulx0=int64#8
  1434. # asm 2: mul <mulx0=%r10
  1435. mul %r10
  1436. # qhasm: carry? t93 += mulrax
  1437. # asm 1: add <mulrax=int64#7,<t93=int64#12
  1438. # asm 2: add <mulrax=%rax,<t93=%r14
  1439. add %rax,%r14
  1440. # qhasm: mulr4 += mulrdx + carry
  1441. # asm 1: adc <mulrdx=int64#3,<mulr4=int64#2
  1442. # asm 2: adc <mulrdx=%rdx,<mulr4=%rsi
  1443. adc %rdx,%rsi
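# note: rows 1..3 of the multiplication (mulx1..mulx3) use a one-limb carry
# variable mulc: each partial product is added into the running limb, the high
# half plus carry is parked in mulc, and mulc is then added into the next
# column.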
  1444. # qhasm: mulx1 = t31_stack
  1445. # asm 1: movq <t31_stack=stack64#29,>mulx1=int64#8
  1446. # asm 2: movq <t31_stack=224(%rsp),>mulx1=%r10
  1447. movq 224(%rsp),%r10
  1448. # qhasm: mulrax = t20_stack
  1449. # asm 1: movq <t20_stack=stack64#12,>mulrax=int64#7
  1450. # asm 2: movq <t20_stack=88(%rsp),>mulrax=%rax
  1451. movq 88(%rsp),%rax
  1452. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  1453. # asm 1: mul <mulx1=int64#8
  1454. # asm 2: mul <mulx1=%r10
  1455. mul %r10
  1456. # qhasm: carry? t91 += mulrax
  1457. # asm 1: add <mulrax=int64#7,<t91=int64#10
  1458. # asm 2: add <mulrax=%rax,<t91=%r12
  1459. add %rax,%r12
  1460. # qhasm: mulc = 0
  1461. # asm 1: mov $0,>mulc=int64#13
  1462. # asm 2: mov $0,>mulc=%r15
  1463. mov $0,%r15
  1464. # qhasm: mulc += mulrdx + carry
  1465. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  1466. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  1467. adc %rdx,%r15
  1468. # qhasm: mulrax = t21_stack
  1469. # asm 1: movq <t21_stack=stack64#13,>mulrax=int64#7
  1470. # asm 2: movq <t21_stack=96(%rsp),>mulrax=%rax
  1471. movq 96(%rsp),%rax
  1472. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  1473. # asm 1: mul <mulx1=int64#8
  1474. # asm 2: mul <mulx1=%r10
  1475. mul %r10
  1476. # qhasm: carry? t92 += mulrax
  1477. # asm 1: add <mulrax=int64#7,<t92=int64#11
  1478. # asm 2: add <mulrax=%rax,<t92=%r13
  1479. add %rax,%r13
  1480. # qhasm: mulrdx += 0 + carry
  1481. # asm 1: adc $0,<mulrdx=int64#3
  1482. # asm 2: adc $0,<mulrdx=%rdx
  1483. adc $0,%rdx
  1484. # qhasm: carry? t92 += mulc
  1485. # asm 1: add <mulc=int64#13,<t92=int64#11
  1486. # asm 2: add <mulc=%r15,<t92=%r13
  1487. add %r15,%r13
  1488. # qhasm: mulc = 0
  1489. # asm 1: mov $0,>mulc=int64#13
  1490. # asm 2: mov $0,>mulc=%r15
  1491. mov $0,%r15
  1492. # qhasm: mulc += mulrdx + carry
  1493. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  1494. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  1495. adc %rdx,%r15
  1496. # qhasm: mulrax = t22_stack
  1497. # asm 1: movq <t22_stack=stack64#14,>mulrax=int64#7
  1498. # asm 2: movq <t22_stack=104(%rsp),>mulrax=%rax
  1499. movq 104(%rsp),%rax
  1500. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  1501. # asm 1: mul <mulx1=int64#8
  1502. # asm 2: mul <mulx1=%r10
  1503. mul %r10
  1504. # qhasm: carry? t93 += mulrax
  1505. # asm 1: add <mulrax=int64#7,<t93=int64#12
  1506. # asm 2: add <mulrax=%rax,<t93=%r14
  1507. add %rax,%r14
  1508. # qhasm: mulrdx += 0 + carry
  1509. # asm 1: adc $0,<mulrdx=int64#3
  1510. # asm 2: adc $0,<mulrdx=%rdx
  1511. adc $0,%rdx
  1512. # qhasm: carry? t93 += mulc
  1513. # asm 1: add <mulc=int64#13,<t93=int64#12
  1514. # asm 2: add <mulc=%r15,<t93=%r14
  1515. add %r15,%r14
  1516. # qhasm: mulc = 0
  1517. # asm 1: mov $0,>mulc=int64#13
  1518. # asm 2: mov $0,>mulc=%r15
  1519. mov $0,%r15
  1520. # qhasm: mulc += mulrdx + carry
  1521. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  1522. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  1523. adc %rdx,%r15
  1524. # qhasm: mulrax = t23_stack
  1525. # asm 1: movq <t23_stack=stack64#15,>mulrax=int64#7
  1526. # asm 2: movq <t23_stack=112(%rsp),>mulrax=%rax
  1527. movq 112(%rsp),%rax
  1528. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  1529. # asm 1: mul <mulx1=int64#8
  1530. # asm 2: mul <mulx1=%r10
  1531. mul %r10
  1532. # qhasm: carry? mulr4 += mulrax
  1533. # asm 1: add <mulrax=int64#7,<mulr4=int64#2
  1534. # asm 2: add <mulrax=%rax,<mulr4=%rsi
  1535. add %rax,%rsi
  1536. # qhasm: mulrdx += 0 + carry
  1537. # asm 1: adc $0,<mulrdx=int64#3
  1538. # asm 2: adc $0,<mulrdx=%rdx
  1539. adc $0,%rdx
  1540. # qhasm: carry? mulr4 += mulc
  1541. # asm 1: add <mulc=int64#13,<mulr4=int64#2
  1542. # asm 2: add <mulc=%r15,<mulr4=%rsi
  1543. add %r15,%rsi
  1544. # qhasm: mulr5 += mulrdx + carry
  1545. # asm 1: adc <mulrdx=int64#3,<mulr5=int64#4
  1546. # asm 2: adc <mulrdx=%rdx,<mulr5=%rcx
  1547. adc %rdx,%rcx
  1548. # qhasm: mulx2 = t32_stack
  1549. # asm 1: movq <t32_stack=stack64#30,>mulx2=int64#8
  1550. # asm 2: movq <t32_stack=232(%rsp),>mulx2=%r10
  1551. movq 232(%rsp),%r10
  1552. # qhasm: mulrax = t20_stack
  1553. # asm 1: movq <t20_stack=stack64#12,>mulrax=int64#7
  1554. # asm 2: movq <t20_stack=88(%rsp),>mulrax=%rax
  1555. movq 88(%rsp),%rax
  1556. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  1557. # asm 1: mul <mulx2=int64#8
  1558. # asm 2: mul <mulx2=%r10
  1559. mul %r10
  1560. # qhasm: carry? t92 += mulrax
  1561. # asm 1: add <mulrax=int64#7,<t92=int64#11
  1562. # asm 2: add <mulrax=%rax,<t92=%r13
  1563. add %rax,%r13
  1564. # qhasm: mulc = 0
  1565. # asm 1: mov $0,>mulc=int64#13
  1566. # asm 2: mov $0,>mulc=%r15
  1567. mov $0,%r15
  1568. # qhasm: mulc += mulrdx + carry
  1569. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  1570. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  1571. adc %rdx,%r15
  1572. # qhasm: mulrax = t21_stack
  1573. # asm 1: movq <t21_stack=stack64#13,>mulrax=int64#7
  1574. # asm 2: movq <t21_stack=96(%rsp),>mulrax=%rax
  1575. movq 96(%rsp),%rax
  1576. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  1577. # asm 1: mul <mulx2=int64#8
  1578. # asm 2: mul <mulx2=%r10
  1579. mul %r10
  1580. # qhasm: carry? t93 += mulrax
  1581. # asm 1: add <mulrax=int64#7,<t93=int64#12
  1582. # asm 2: add <mulrax=%rax,<t93=%r14
  1583. add %rax,%r14
  1584. # qhasm: mulrdx += 0 + carry
  1585. # asm 1: adc $0,<mulrdx=int64#3
  1586. # asm 2: adc $0,<mulrdx=%rdx
  1587. adc $0,%rdx
  1588. # qhasm: carry? t93 += mulc
  1589. # asm 1: add <mulc=int64#13,<t93=int64#12
  1590. # asm 2: add <mulc=%r15,<t93=%r14
  1591. add %r15,%r14
  1592. # qhasm: mulc = 0
  1593. # asm 1: mov $0,>mulc=int64#13
  1594. # asm 2: mov $0,>mulc=%r15
  1595. mov $0,%r15
  1596. # qhasm: mulc += mulrdx + carry
  1597. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  1598. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  1599. adc %rdx,%r15
  1600. # qhasm: mulrax = t22_stack
  1601. # asm 1: movq <t22_stack=stack64#14,>mulrax=int64#7
  1602. # asm 2: movq <t22_stack=104(%rsp),>mulrax=%rax
  1603. movq 104(%rsp),%rax
  1604. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  1605. # asm 1: mul <mulx2=int64#8
  1606. # asm 2: mul <mulx2=%r10
  1607. mul %r10
  1608. # qhasm: carry? mulr4 += mulrax
  1609. # asm 1: add <mulrax=int64#7,<mulr4=int64#2
  1610. # asm 2: add <mulrax=%rax,<mulr4=%rsi
  1611. add %rax,%rsi
  1612. # qhasm: mulrdx += 0 + carry
  1613. # asm 1: adc $0,<mulrdx=int64#3
  1614. # asm 2: adc $0,<mulrdx=%rdx
  1615. adc $0,%rdx
  1616. # qhasm: carry? mulr4 += mulc
  1617. # asm 1: add <mulc=int64#13,<mulr4=int64#2
  1618. # asm 2: add <mulc=%r15,<mulr4=%rsi
  1619. add %r15,%rsi
  1620. # qhasm: mulc = 0
  1621. # asm 1: mov $0,>mulc=int64#13
  1622. # asm 2: mov $0,>mulc=%r15
  1623. mov $0,%r15
  1624. # qhasm: mulc += mulrdx + carry
  1625. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  1626. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  1627. adc %rdx,%r15
  1628. # qhasm: mulrax = t23_stack
  1629. # asm 1: movq <t23_stack=stack64#15,>mulrax=int64#7
  1630. # asm 2: movq <t23_stack=112(%rsp),>mulrax=%rax
  1631. movq 112(%rsp),%rax
  1632. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  1633. # asm 1: mul <mulx2=int64#8
  1634. # asm 2: mul <mulx2=%r10
  1635. mul %r10
  1636. # qhasm: carry? mulr5 += mulrax
  1637. # asm 1: add <mulrax=int64#7,<mulr5=int64#4
  1638. # asm 2: add <mulrax=%rax,<mulr5=%rcx
  1639. add %rax,%rcx
  1640. # qhasm: mulrdx += 0 + carry
  1641. # asm 1: adc $0,<mulrdx=int64#3
  1642. # asm 2: adc $0,<mulrdx=%rdx
  1643. adc $0,%rdx
  1644. # qhasm: carry? mulr5 += mulc
  1645. # asm 1: add <mulc=int64#13,<mulr5=int64#4
  1646. # asm 2: add <mulc=%r15,<mulr5=%rcx
  1647. add %r15,%rcx
  1648. # qhasm: mulr6 += mulrdx + carry
  1649. # asm 1: adc <mulrdx=int64#3,<mulr6=int64#5
  1650. # asm 2: adc <mulrdx=%rdx,<mulr6=%r8
  1651. adc %rdx,%r8
  1652. # qhasm: mulx3 = t33_stack
  1653. # asm 1: movq <t33_stack=stack64#31,>mulx3=int64#8
  1654. # asm 2: movq <t33_stack=240(%rsp),>mulx3=%r10
  1655. movq 240(%rsp),%r10
  1656. # qhasm: mulrax = t20_stack
  1657. # asm 1: movq <t20_stack=stack64#12,>mulrax=int64#7
  1658. # asm 2: movq <t20_stack=88(%rsp),>mulrax=%rax
  1659. movq 88(%rsp),%rax
  1660. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  1661. # asm 1: mul <mulx3=int64#8
  1662. # asm 2: mul <mulx3=%r10
  1663. mul %r10
  1664. # qhasm: carry? t93 += mulrax
  1665. # asm 1: add <mulrax=int64#7,<t93=int64#12
  1666. # asm 2: add <mulrax=%rax,<t93=%r14
  1667. add %rax,%r14
  1668. # qhasm: mulc = 0
  1669. # asm 1: mov $0,>mulc=int64#13
  1670. # asm 2: mov $0,>mulc=%r15
  1671. mov $0,%r15
  1672. # qhasm: mulc += mulrdx + carry
  1673. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  1674. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  1675. adc %rdx,%r15
  1676. # qhasm: mulrax = t21_stack
  1677. # asm 1: movq <t21_stack=stack64#13,>mulrax=int64#7
  1678. # asm 2: movq <t21_stack=96(%rsp),>mulrax=%rax
  1679. movq 96(%rsp),%rax
  1680. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  1681. # asm 1: mul <mulx3=int64#8
  1682. # asm 2: mul <mulx3=%r10
  1683. mul %r10
  1684. # qhasm: carry? mulr4 += mulrax
  1685. # asm 1: add <mulrax=int64#7,<mulr4=int64#2
  1686. # asm 2: add <mulrax=%rax,<mulr4=%rsi
  1687. add %rax,%rsi
  1688. # qhasm: mulrdx += 0 + carry
  1689. # asm 1: adc $0,<mulrdx=int64#3
  1690. # asm 2: adc $0,<mulrdx=%rdx
  1691. adc $0,%rdx
  1692. # qhasm: carry? mulr4 += mulc
  1693. # asm 1: add <mulc=int64#13,<mulr4=int64#2
  1694. # asm 2: add <mulc=%r15,<mulr4=%rsi
  1695. add %r15,%rsi
  1696. # qhasm: mulc = 0
  1697. # asm 1: mov $0,>mulc=int64#13
  1698. # asm 2: mov $0,>mulc=%r15
  1699. mov $0,%r15
  1700. # qhasm: mulc += mulrdx + carry
  1701. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  1702. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  1703. adc %rdx,%r15
  1704. # qhasm: mulrax = t22_stack
  1705. # asm 1: movq <t22_stack=stack64#14,>mulrax=int64#7
  1706. # asm 2: movq <t22_stack=104(%rsp),>mulrax=%rax
  1707. movq 104(%rsp),%rax
  1708. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  1709. # asm 1: mul <mulx3=int64#8
  1710. # asm 2: mul <mulx3=%r10
  1711. mul %r10
  1712. # qhasm: carry? mulr5 += mulrax
  1713. # asm 1: add <mulrax=int64#7,<mulr5=int64#4
  1714. # asm 2: add <mulrax=%rax,<mulr5=%rcx
  1715. add %rax,%rcx
  1716. # qhasm: mulrdx += 0 + carry
  1717. # asm 1: adc $0,<mulrdx=int64#3
  1718. # asm 2: adc $0,<mulrdx=%rdx
  1719. adc $0,%rdx
  1720. # qhasm: carry? mulr5 += mulc
  1721. # asm 1: add <mulc=int64#13,<mulr5=int64#4
  1722. # asm 2: add <mulc=%r15,<mulr5=%rcx
  1723. add %r15,%rcx
  1724. # qhasm: mulc = 0
  1725. # asm 1: mov $0,>mulc=int64#13
  1726. # asm 2: mov $0,>mulc=%r15
  1727. mov $0,%r15
  1728. # qhasm: mulc += mulrdx + carry
  1729. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  1730. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  1731. adc %rdx,%r15
  1732. # qhasm: mulrax = t23_stack
  1733. # asm 1: movq <t23_stack=stack64#15,>mulrax=int64#7
  1734. # asm 2: movq <t23_stack=112(%rsp),>mulrax=%rax
  1735. movq 112(%rsp),%rax
  1736. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  1737. # asm 1: mul <mulx3=int64#8
  1738. # asm 2: mul <mulx3=%r10
  1739. mul %r10
  1740. # qhasm: carry? mulr6 += mulrax
  1741. # asm 1: add <mulrax=int64#7,<mulr6=int64#5
  1742. # asm 2: add <mulrax=%rax,<mulr6=%r8
  1743. add %rax,%r8
  1744. # qhasm: mulrdx += 0 + carry
  1745. # asm 1: adc $0,<mulrdx=int64#3
  1746. # asm 2: adc $0,<mulrdx=%rdx
  1747. adc $0,%rdx
  1748. # qhasm: carry? mulr6 += mulc
  1749. # asm 1: add <mulc=int64#13,<mulr6=int64#5
  1750. # asm 2: add <mulc=%r15,<mulr6=%r8
  1751. add %r15,%r8
  1752. # qhasm: mulr7 += mulrdx + carry
  1753. # asm 1: adc <mulrdx=int64#3,<mulr7=int64#6
  1754. # asm 2: adc <mulrdx=%rdx,<mulr7=%r9
  1755. adc %rdx,%r9
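# note: the 8-limb product t90..t93, mulr4..mulr7 is now reduced: mulr4..mulr7
# are each multiplied by 38 and added into t90..t93, the overflow limb mulr8 is
# multiplied by 38 and folded into t90, and a final possible carry is folded
# once more, exactly as in the squaring above.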
  1756. # qhasm: mulrax = mulr4
  1757. # asm 1: mov <mulr4=int64#2,>mulrax=int64#7
  1758. # asm 2: mov <mulr4=%rsi,>mulrax=%rax
  1759. mov %rsi,%rax
  1760. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  1761. mulq crypto_scalarmult_curve25519_amd64_64_38
  1762. # qhasm: mulr4 = mulrax
  1763. # asm 1: mov <mulrax=int64#7,>mulr4=int64#2
  1764. # asm 2: mov <mulrax=%rax,>mulr4=%rsi
  1765. mov %rax,%rsi
  1766. # qhasm: mulrax = mulr5
  1767. # asm 1: mov <mulr5=int64#4,>mulrax=int64#7
  1768. # asm 2: mov <mulr5=%rcx,>mulrax=%rax
  1769. mov %rcx,%rax
  1770. # qhasm: mulr5 = mulrdx
  1771. # asm 1: mov <mulrdx=int64#3,>mulr5=int64#4
  1772. # asm 2: mov <mulrdx=%rdx,>mulr5=%rcx
  1773. mov %rdx,%rcx
  1774. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  1775. mulq crypto_scalarmult_curve25519_amd64_64_38
  1776. # qhasm: carry? mulr5 += mulrax
  1777. # asm 1: add <mulrax=int64#7,<mulr5=int64#4
  1778. # asm 2: add <mulrax=%rax,<mulr5=%rcx
  1779. add %rax,%rcx
  1780. # qhasm: mulrax = mulr6
  1781. # asm 1: mov <mulr6=int64#5,>mulrax=int64#7
  1782. # asm 2: mov <mulr6=%r8,>mulrax=%rax
  1783. mov %r8,%rax
  1784. # qhasm: mulr6 = 0
  1785. # asm 1: mov $0,>mulr6=int64#5
  1786. # asm 2: mov $0,>mulr6=%r8
  1787. mov $0,%r8
  1788. # qhasm: mulr6 += mulrdx + carry
  1789. # asm 1: adc <mulrdx=int64#3,<mulr6=int64#5
  1790. # asm 2: adc <mulrdx=%rdx,<mulr6=%r8
  1791. adc %rdx,%r8
  1792. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  1793. mulq crypto_scalarmult_curve25519_amd64_64_38
  1794. # qhasm: carry? mulr6 += mulrax
  1795. # asm 1: add <mulrax=int64#7,<mulr6=int64#5
  1796. # asm 2: add <mulrax=%rax,<mulr6=%r8
  1797. add %rax,%r8
  1798. # qhasm: mulrax = mulr7
  1799. # asm 1: mov <mulr7=int64#6,>mulrax=int64#7
  1800. # asm 2: mov <mulr7=%r9,>mulrax=%rax
  1801. mov %r9,%rax
  1802. # qhasm: mulr7 = 0
  1803. # asm 1: mov $0,>mulr7=int64#6
  1804. # asm 2: mov $0,>mulr7=%r9
  1805. mov $0,%r9
  1806. # qhasm: mulr7 += mulrdx + carry
  1807. # asm 1: adc <mulrdx=int64#3,<mulr7=int64#6
  1808. # asm 2: adc <mulrdx=%rdx,<mulr7=%r9
  1809. adc %rdx,%r9
  1810. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  1811. mulq crypto_scalarmult_curve25519_amd64_64_38
  1812. # qhasm: carry? mulr7 += mulrax
  1813. # asm 1: add <mulrax=int64#7,<mulr7=int64#6
  1814. # asm 2: add <mulrax=%rax,<mulr7=%r9
  1815. add %rax,%r9
  1816. # qhasm: mulr8 = 0
  1817. # asm 1: mov $0,>mulr8=int64#7
  1818. # asm 2: mov $0,>mulr8=%rax
  1819. mov $0,%rax
  1820. # qhasm: mulr8 += mulrdx + carry
  1821. # asm 1: adc <mulrdx=int64#3,<mulr8=int64#7
  1822. # asm 2: adc <mulrdx=%rdx,<mulr8=%rax
  1823. adc %rdx,%rax
  1824. # qhasm: carry? t90 += mulr4
  1825. # asm 1: add <mulr4=int64#2,<t90=int64#9
  1826. # asm 2: add <mulr4=%rsi,<t90=%r11
  1827. add %rsi,%r11
  1828. # qhasm: carry? t91 += mulr5 + carry
  1829. # asm 1: adc <mulr5=int64#4,<t91=int64#10
  1830. # asm 2: adc <mulr5=%rcx,<t91=%r12
  1831. adc %rcx,%r12
  1832. # qhasm: carry? t92 += mulr6 + carry
  1833. # asm 1: adc <mulr6=int64#5,<t92=int64#11
  1834. # asm 2: adc <mulr6=%r8,<t92=%r13
  1835. adc %r8,%r13
  1836. # qhasm: carry? t93 += mulr7 + carry
  1837. # asm 1: adc <mulr7=int64#6,<t93=int64#12
  1838. # asm 2: adc <mulr7=%r9,<t93=%r14
  1839. adc %r9,%r14
  1840. # qhasm: mulzero = 0
  1841. # asm 1: mov $0,>mulzero=int64#2
  1842. # asm 2: mov $0,>mulzero=%rsi
  1843. mov $0,%rsi
  1844. # qhasm: mulr8 += mulzero + carry
  1845. # asm 1: adc <mulzero=int64#2,<mulr8=int64#7
  1846. # asm 2: adc <mulzero=%rsi,<mulr8=%rax
  1847. adc %rsi,%rax
  1848. # qhasm: mulr8 *= 38
  1849. # asm 1: imulq $38,<mulr8=int64#7,>mulr8=int64#3
  1850. # asm 2: imulq $38,<mulr8=%rax,>mulr8=%rdx
  1851. imulq $38,%rax,%rdx
  1852. # qhasm: carry? t90 += mulr8
  1853. # asm 1: add <mulr8=int64#3,<t90=int64#9
  1854. # asm 2: add <mulr8=%rdx,<t90=%r11
  1855. add %rdx,%r11
  1856. # qhasm: carry? t91 += mulzero + carry
  1857. # asm 1: adc <mulzero=int64#2,<t91=int64#10
  1858. # asm 2: adc <mulzero=%rsi,<t91=%r12
  1859. adc %rsi,%r12
  1860. # qhasm: carry? t92 += mulzero + carry
  1861. # asm 1: adc <mulzero=int64#2,<t92=int64#11
  1862. # asm 2: adc <mulzero=%rsi,<t92=%r13
  1863. adc %rsi,%r13
  1864. # qhasm: carry? t93 += mulzero + carry
  1865. # asm 1: adc <mulzero=int64#2,<t93=int64#12
  1866. # asm 2: adc <mulzero=%rsi,<t93=%r14
  1867. adc %rsi,%r14
  1868. # qhasm: mulzero += mulzero + carry
  1869. # asm 1: adc <mulzero=int64#2,<mulzero=int64#2
  1870. # asm 2: adc <mulzero=%rsi,<mulzero=%rsi
  1871. adc %rsi,%rsi
  1872. # qhasm: mulzero *= 38
  1873. # asm 1: imulq $38,<mulzero=int64#2,>mulzero=int64#2
  1874. # asm 2: imulq $38,<mulzero=%rsi,>mulzero=%rsi
  1875. imulq $38,%rsi,%rsi
  1876. # qhasm: t90 += mulzero
  1877. # asm 1: add <mulzero=int64#2,<t90=int64#9
  1878. # asm 2: add <mulzero=%rsi,<t90=%r11
  1879. add %rsi,%r11
  1880. # qhasm: t90_stack = t90
  1881. # asm 1: movq <t90=int64#9,>t90_stack=stack64#12
  1882. # asm 2: movq <t90=%r11,>t90_stack=88(%rsp)
  1883. movq %r11,88(%rsp)
  1884. # qhasm: t91_stack = t91
  1885. # asm 1: movq <t91=int64#10,>t91_stack=stack64#13
  1886. # asm 2: movq <t91=%r12,>t91_stack=96(%rsp)
  1887. movq %r12,96(%rsp)
  1888. # qhasm: t92_stack = t92
  1889. # asm 1: movq <t92=int64#11,>t92_stack=stack64#14
  1890. # asm 2: movq <t92=%r13,>t92_stack=104(%rsp)
  1891. movq %r13,104(%rsp)
  1892. # qhasm: t93_stack = t93
  1893. # asm 1: movq <t93=int64#12,>t93_stack=stack64#15
  1894. # asm 2: movq <t93=%r14,>t93_stack=112(%rsp)
  1895. movq %r14,112(%rsp)
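# note: t9 = t3 * t2 is stored in t90_stack..t93_stack, reusing the 88..112(%rsp)
# slots that previously held t2.  The block below begins the analogous
# multiplication t8 = t4 * t1 (t40_stack..t43_stack times t10_stack..t13_stack),
# with the same cross-product and carry pattern followed by the reduction by 38.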
  1896. # qhasm: mulr4 = 0
  1897. # asm 1: mov $0,>mulr4=int64#2
  1898. # asm 2: mov $0,>mulr4=%rsi
  1899. mov $0,%rsi
  1900. # qhasm: mulr5 = 0
  1901. # asm 1: mov $0,>mulr5=int64#4
  1902. # asm 2: mov $0,>mulr5=%rcx
  1903. mov $0,%rcx
  1904. # qhasm: mulr6 = 0
  1905. # asm 1: mov $0,>mulr6=int64#5
  1906. # asm 2: mov $0,>mulr6=%r8
  1907. mov $0,%r8
  1908. # qhasm: mulr7 = 0
  1909. # asm 1: mov $0,>mulr7=int64#6
  1910. # asm 2: mov $0,>mulr7=%r9
  1911. mov $0,%r9
  1912. # qhasm: mulx0 = t40_stack
  1913. # asm 1: movq <t40_stack=stack64#32,>mulx0=int64#8
  1914. # asm 2: movq <t40_stack=248(%rsp),>mulx0=%r10
  1915. movq 248(%rsp),%r10
  1916. # qhasm: mulrax = t10_stack
  1917. # asm 1: movq <t10_stack=stack64#8,>mulrax=int64#7
  1918. # asm 2: movq <t10_stack=56(%rsp),>mulrax=%rax
  1919. movq 56(%rsp),%rax
  1920. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  1921. # asm 1: mul <mulx0=int64#8
  1922. # asm 2: mul <mulx0=%r10
  1923. mul %r10
  1924. # qhasm: t80 = mulrax
  1925. # asm 1: mov <mulrax=int64#7,>t80=int64#9
  1926. # asm 2: mov <mulrax=%rax,>t80=%r11
  1927. mov %rax,%r11
  1928. # qhasm: t81 = mulrdx
  1929. # asm 1: mov <mulrdx=int64#3,>t81=int64#10
  1930. # asm 2: mov <mulrdx=%rdx,>t81=%r12
  1931. mov %rdx,%r12
  1932. # qhasm: mulrax = t11_stack
  1933. # asm 1: movq <t11_stack=stack64#9,>mulrax=int64#7
  1934. # asm 2: movq <t11_stack=64(%rsp),>mulrax=%rax
  1935. movq 64(%rsp),%rax
  1936. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  1937. # asm 1: mul <mulx0=int64#8
  1938. # asm 2: mul <mulx0=%r10
  1939. mul %r10
  1940. # qhasm: carry? t81 += mulrax
  1941. # asm 1: add <mulrax=int64#7,<t81=int64#10
  1942. # asm 2: add <mulrax=%rax,<t81=%r12
  1943. add %rax,%r12
  1944. # qhasm: t82 = 0
  1945. # asm 1: mov $0,>t82=int64#11
  1946. # asm 2: mov $0,>t82=%r13
  1947. mov $0,%r13
  1948. # qhasm: t82 += mulrdx + carry
  1949. # asm 1: adc <mulrdx=int64#3,<t82=int64#11
  1950. # asm 2: adc <mulrdx=%rdx,<t82=%r13
  1951. adc %rdx,%r13
  1952. # qhasm: mulrax = t12_stack
  1953. # asm 1: movq <t12_stack=stack64#10,>mulrax=int64#7
  1954. # asm 2: movq <t12_stack=72(%rsp),>mulrax=%rax
  1955. movq 72(%rsp),%rax
  1956. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  1957. # asm 1: mul <mulx0=int64#8
  1958. # asm 2: mul <mulx0=%r10
  1959. mul %r10
  1960. # qhasm: carry? t82 += mulrax
  1961. # asm 1: add <mulrax=int64#7,<t82=int64#11
  1962. # asm 2: add <mulrax=%rax,<t82=%r13
  1963. add %rax,%r13
  1964. # qhasm: t83 = 0
  1965. # asm 1: mov $0,>t83=int64#12
  1966. # asm 2: mov $0,>t83=%r14
  1967. mov $0,%r14
  1968. # qhasm: t83 += mulrdx + carry
  1969. # asm 1: adc <mulrdx=int64#3,<t83=int64#12
  1970. # asm 2: adc <mulrdx=%rdx,<t83=%r14
  1971. adc %rdx,%r14
  1972. # qhasm: mulrax = t13_stack
  1973. # asm 1: movq <t13_stack=stack64#11,>mulrax=int64#7
  1974. # asm 2: movq <t13_stack=80(%rsp),>mulrax=%rax
  1975. movq 80(%rsp),%rax
  1976. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  1977. # asm 1: mul <mulx0=int64#8
  1978. # asm 2: mul <mulx0=%r10
  1979. mul %r10
  1980. # qhasm: carry? t83 += mulrax
  1981. # asm 1: add <mulrax=int64#7,<t83=int64#12
  1982. # asm 2: add <mulrax=%rax,<t83=%r14
  1983. add %rax,%r14
  1984. # qhasm: mulr4 += mulrdx + carry
  1985. # asm 1: adc <mulrdx=int64#3,<mulr4=int64#2
  1986. # asm 2: adc <mulrdx=%rdx,<mulr4=%rsi
  1987. adc %rdx,%rsi
  1988. # qhasm: mulx1 = t41_stack
  1989. # asm 1: movq <t41_stack=stack64#33,>mulx1=int64#8
  1990. # asm 2: movq <t41_stack=256(%rsp),>mulx1=%r10
  1991. movq 256(%rsp),%r10
  1992. # qhasm: mulrax = t10_stack
  1993. # asm 1: movq <t10_stack=stack64#8,>mulrax=int64#7
  1994. # asm 2: movq <t10_stack=56(%rsp),>mulrax=%rax
  1995. movq 56(%rsp),%rax
  1996. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  1997. # asm 1: mul <mulx1=int64#8
  1998. # asm 2: mul <mulx1=%r10
  1999. mul %r10
  2000. # qhasm: carry? t81 += mulrax
  2001. # asm 1: add <mulrax=int64#7,<t81=int64#10
  2002. # asm 2: add <mulrax=%rax,<t81=%r12
  2003. add %rax,%r12
  2004. # qhasm: mulc = 0
  2005. # asm 1: mov $0,>mulc=int64#13
  2006. # asm 2: mov $0,>mulc=%r15
  2007. mov $0,%r15
  2008. # qhasm: mulc += mulrdx + carry
  2009. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  2010. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  2011. adc %rdx,%r15
  2012. # qhasm: mulrax = t11_stack
  2013. # asm 1: movq <t11_stack=stack64#9,>mulrax=int64#7
  2014. # asm 2: movq <t11_stack=64(%rsp),>mulrax=%rax
  2015. movq 64(%rsp),%rax
  2016. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  2017. # asm 1: mul <mulx1=int64#8
  2018. # asm 2: mul <mulx1=%r10
  2019. mul %r10
  2020. # qhasm: carry? t82 += mulrax
  2021. # asm 1: add <mulrax=int64#7,<t82=int64#11
  2022. # asm 2: add <mulrax=%rax,<t82=%r13
  2023. add %rax,%r13
  2024. # qhasm: mulrdx += 0 + carry
  2025. # asm 1: adc $0,<mulrdx=int64#3
  2026. # asm 2: adc $0,<mulrdx=%rdx
  2027. adc $0,%rdx
  2028. # qhasm: carry? t82 += mulc
  2029. # asm 1: add <mulc=int64#13,<t82=int64#11
  2030. # asm 2: add <mulc=%r15,<t82=%r13
  2031. add %r15,%r13
  2032. # qhasm: mulc = 0
  2033. # asm 1: mov $0,>mulc=int64#13
  2034. # asm 2: mov $0,>mulc=%r15
  2035. mov $0,%r15
  2036. # qhasm: mulc += mulrdx + carry
  2037. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  2038. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  2039. adc %rdx,%r15
  2040. # qhasm: mulrax = t12_stack
  2041. # asm 1: movq <t12_stack=stack64#10,>mulrax=int64#7
  2042. # asm 2: movq <t12_stack=72(%rsp),>mulrax=%rax
  2043. movq 72(%rsp),%rax
  2044. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  2045. # asm 1: mul <mulx1=int64#8
  2046. # asm 2: mul <mulx1=%r10
  2047. mul %r10
  2048. # qhasm: carry? t83 += mulrax
  2049. # asm 1: add <mulrax=int64#7,<t83=int64#12
  2050. # asm 2: add <mulrax=%rax,<t83=%r14
  2051. add %rax,%r14
  2052. # qhasm: mulrdx += 0 + carry
  2053. # asm 1: adc $0,<mulrdx=int64#3
  2054. # asm 2: adc $0,<mulrdx=%rdx
  2055. adc $0,%rdx
  2056. # qhasm: carry? t83 += mulc
  2057. # asm 1: add <mulc=int64#13,<t83=int64#12
  2058. # asm 2: add <mulc=%r15,<t83=%r14
  2059. add %r15,%r14
  2060. # qhasm: mulc = 0
  2061. # asm 1: mov $0,>mulc=int64#13
  2062. # asm 2: mov $0,>mulc=%r15
  2063. mov $0,%r15
  2064. # qhasm: mulc += mulrdx + carry
  2065. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  2066. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  2067. adc %rdx,%r15
  2068. # qhasm: mulrax = t13_stack
  2069. # asm 1: movq <t13_stack=stack64#11,>mulrax=int64#7
  2070. # asm 2: movq <t13_stack=80(%rsp),>mulrax=%rax
  2071. movq 80(%rsp),%rax
  2072. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  2073. # asm 1: mul <mulx1=int64#8
  2074. # asm 2: mul <mulx1=%r10
  2075. mul %r10
  2076. # qhasm: carry? mulr4 += mulrax
  2077. # asm 1: add <mulrax=int64#7,<mulr4=int64#2
  2078. # asm 2: add <mulrax=%rax,<mulr4=%rsi
  2079. add %rax,%rsi
  2080. # qhasm: mulrdx += 0 + carry
  2081. # asm 1: adc $0,<mulrdx=int64#3
  2082. # asm 2: adc $0,<mulrdx=%rdx
  2083. adc $0,%rdx
  2084. # qhasm: carry? mulr4 += mulc
  2085. # asm 1: add <mulc=int64#13,<mulr4=int64#2
  2086. # asm 2: add <mulc=%r15,<mulr4=%rsi
  2087. add %r15,%rsi
  2088. # qhasm: mulr5 += mulrdx + carry
  2089. # asm 1: adc <mulrdx=int64#3,<mulr5=int64#4
  2090. # asm 2: adc <mulrdx=%rdx,<mulr5=%rcx
  2091. adc %rdx,%rcx
  2092. # qhasm: mulx2 = t42_stack
  2093. # asm 1: movq <t42_stack=stack64#34,>mulx2=int64#8
  2094. # asm 2: movq <t42_stack=264(%rsp),>mulx2=%r10
  2095. movq 264(%rsp),%r10
  2096. # qhasm: mulrax = t10_stack
  2097. # asm 1: movq <t10_stack=stack64#8,>mulrax=int64#7
  2098. # asm 2: movq <t10_stack=56(%rsp),>mulrax=%rax
  2099. movq 56(%rsp),%rax
  2100. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  2101. # asm 1: mul <mulx2=int64#8
  2102. # asm 2: mul <mulx2=%r10
  2103. mul %r10
  2104. # qhasm: carry? t82 += mulrax
  2105. # asm 1: add <mulrax=int64#7,<t82=int64#11
  2106. # asm 2: add <mulrax=%rax,<t82=%r13
  2107. add %rax,%r13
  2108. # qhasm: mulc = 0
  2109. # asm 1: mov $0,>mulc=int64#13
  2110. # asm 2: mov $0,>mulc=%r15
  2111. mov $0,%r15
  2112. # qhasm: mulc += mulrdx + carry
  2113. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  2114. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  2115. adc %rdx,%r15
  2116. # qhasm: mulrax = t11_stack
  2117. # asm 1: movq <t11_stack=stack64#9,>mulrax=int64#7
  2118. # asm 2: movq <t11_stack=64(%rsp),>mulrax=%rax
  2119. movq 64(%rsp),%rax
  2120. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  2121. # asm 1: mul <mulx2=int64#8
  2122. # asm 2: mul <mulx2=%r10
  2123. mul %r10
  2124. # qhasm: carry? t83 += mulrax
  2125. # asm 1: add <mulrax=int64#7,<t83=int64#12
  2126. # asm 2: add <mulrax=%rax,<t83=%r14
  2127. add %rax,%r14
  2128. # qhasm: mulrdx += 0 + carry
  2129. # asm 1: adc $0,<mulrdx=int64#3
  2130. # asm 2: adc $0,<mulrdx=%rdx
  2131. adc $0,%rdx
  2132. # qhasm: carry? t83 += mulc
  2133. # asm 1: add <mulc=int64#13,<t83=int64#12
  2134. # asm 2: add <mulc=%r15,<t83=%r14
  2135. add %r15,%r14
  2136. # qhasm: mulc = 0
  2137. # asm 1: mov $0,>mulc=int64#13
  2138. # asm 2: mov $0,>mulc=%r15
  2139. mov $0,%r15
  2140. # qhasm: mulc += mulrdx + carry
  2141. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  2142. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  2143. adc %rdx,%r15
  2144. # qhasm: mulrax = t12_stack
  2145. # asm 1: movq <t12_stack=stack64#10,>mulrax=int64#7
  2146. # asm 2: movq <t12_stack=72(%rsp),>mulrax=%rax
  2147. movq 72(%rsp),%rax
  2148. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  2149. # asm 1: mul <mulx2=int64#8
  2150. # asm 2: mul <mulx2=%r10
  2151. mul %r10
  2152. # qhasm: carry? mulr4 += mulrax
  2153. # asm 1: add <mulrax=int64#7,<mulr4=int64#2
  2154. # asm 2: add <mulrax=%rax,<mulr4=%rsi
  2155. add %rax,%rsi
  2156. # qhasm: mulrdx += 0 + carry
  2157. # asm 1: adc $0,<mulrdx=int64#3
  2158. # asm 2: adc $0,<mulrdx=%rdx
  2159. adc $0,%rdx
  2160. # qhasm: carry? mulr4 += mulc
  2161. # asm 1: add <mulc=int64#13,<mulr4=int64#2
  2162. # asm 2: add <mulc=%r15,<mulr4=%rsi
  2163. add %r15,%rsi
  2164. # qhasm: mulc = 0
  2165. # asm 1: mov $0,>mulc=int64#13
  2166. # asm 2: mov $0,>mulc=%r15
  2167. mov $0,%r15
  2168. # qhasm: mulc += mulrdx + carry
  2169. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  2170. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  2171. adc %rdx,%r15
  2172. # qhasm: mulrax = t13_stack
  2173. # asm 1: movq <t13_stack=stack64#11,>mulrax=int64#7
  2174. # asm 2: movq <t13_stack=80(%rsp),>mulrax=%rax
  2175. movq 80(%rsp),%rax
  2176. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  2177. # asm 1: mul <mulx2=int64#8
  2178. # asm 2: mul <mulx2=%r10
  2179. mul %r10
  2180. # qhasm: carry? mulr5 += mulrax
  2181. # asm 1: add <mulrax=int64#7,<mulr5=int64#4
  2182. # asm 2: add <mulrax=%rax,<mulr5=%rcx
  2183. add %rax,%rcx
  2184. # qhasm: mulrdx += 0 + carry
  2185. # asm 1: adc $0,<mulrdx=int64#3
  2186. # asm 2: adc $0,<mulrdx=%rdx
  2187. adc $0,%rdx
  2188. # qhasm: carry? mulr5 += mulc
  2189. # asm 1: add <mulc=int64#13,<mulr5=int64#4
  2190. # asm 2: add <mulc=%r15,<mulr5=%rcx
  2191. add %r15,%rcx
  2192. # qhasm: mulr6 += mulrdx + carry
  2193. # asm 1: adc <mulrdx=int64#3,<mulr6=int64#5
  2194. # asm 2: adc <mulrdx=%rdx,<mulr6=%r8
  2195. adc %rdx,%r8
  2196. # qhasm: mulx3 = t43_stack
  2197. # asm 1: movq <t43_stack=stack64#35,>mulx3=int64#8
  2198. # asm 2: movq <t43_stack=272(%rsp),>mulx3=%r10
  2199. movq 272(%rsp),%r10
  2200. # qhasm: mulrax = t10_stack
  2201. # asm 1: movq <t10_stack=stack64#8,>mulrax=int64#7
  2202. # asm 2: movq <t10_stack=56(%rsp),>mulrax=%rax
  2203. movq 56(%rsp),%rax
  2204. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  2205. # asm 1: mul <mulx3=int64#8
  2206. # asm 2: mul <mulx3=%r10
  2207. mul %r10
  2208. # qhasm: carry? t83 += mulrax
  2209. # asm 1: add <mulrax=int64#7,<t83=int64#12
  2210. # asm 2: add <mulrax=%rax,<t83=%r14
  2211. add %rax,%r14
  2212. # qhasm: mulc = 0
  2213. # asm 1: mov $0,>mulc=int64#13
  2214. # asm 2: mov $0,>mulc=%r15
  2215. mov $0,%r15
  2216. # qhasm: mulc += mulrdx + carry
  2217. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  2218. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  2219. adc %rdx,%r15
  2220. # qhasm: mulrax = t11_stack
  2221. # asm 1: movq <t11_stack=stack64#9,>mulrax=int64#7
  2222. # asm 2: movq <t11_stack=64(%rsp),>mulrax=%rax
  2223. movq 64(%rsp),%rax
  2224. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  2225. # asm 1: mul <mulx3=int64#8
  2226. # asm 2: mul <mulx3=%r10
  2227. mul %r10
  2228. # qhasm: carry? mulr4 += mulrax
  2229. # asm 1: add <mulrax=int64#7,<mulr4=int64#2
  2230. # asm 2: add <mulrax=%rax,<mulr4=%rsi
  2231. add %rax,%rsi
  2232. # qhasm: mulrdx += 0 + carry
  2233. # asm 1: adc $0,<mulrdx=int64#3
  2234. # asm 2: adc $0,<mulrdx=%rdx
  2235. adc $0,%rdx
  2236. # qhasm: carry? mulr4 += mulc
  2237. # asm 1: add <mulc=int64#13,<mulr4=int64#2
  2238. # asm 2: add <mulc=%r15,<mulr4=%rsi
  2239. add %r15,%rsi
  2240. # qhasm: mulc = 0
  2241. # asm 1: mov $0,>mulc=int64#13
  2242. # asm 2: mov $0,>mulc=%r15
  2243. mov $0,%r15
  2244. # qhasm: mulc += mulrdx + carry
  2245. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  2246. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  2247. adc %rdx,%r15
  2248. # qhasm: mulrax = t12_stack
  2249. # asm 1: movq <t12_stack=stack64#10,>mulrax=int64#7
  2250. # asm 2: movq <t12_stack=72(%rsp),>mulrax=%rax
  2251. movq 72(%rsp),%rax
  2252. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  2253. # asm 1: mul <mulx3=int64#8
  2254. # asm 2: mul <mulx3=%r10
  2255. mul %r10
  2256. # qhasm: carry? mulr5 += mulrax
  2257. # asm 1: add <mulrax=int64#7,<mulr5=int64#4
  2258. # asm 2: add <mulrax=%rax,<mulr5=%rcx
  2259. add %rax,%rcx
  2260. # qhasm: mulrdx += 0 + carry
  2261. # asm 1: adc $0,<mulrdx=int64#3
  2262. # asm 2: adc $0,<mulrdx=%rdx
  2263. adc $0,%rdx
  2264. # qhasm: carry? mulr5 += mulc
  2265. # asm 1: add <mulc=int64#13,<mulr5=int64#4
  2266. # asm 2: add <mulc=%r15,<mulr5=%rcx
  2267. add %r15,%rcx
  2268. # qhasm: mulc = 0
  2269. # asm 1: mov $0,>mulc=int64#13
  2270. # asm 2: mov $0,>mulc=%r15
  2271. mov $0,%r15
  2272. # qhasm: mulc += mulrdx + carry
  2273. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  2274. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  2275. adc %rdx,%r15
  2276. # qhasm: mulrax = t13_stack
  2277. # asm 1: movq <t13_stack=stack64#11,>mulrax=int64#7
  2278. # asm 2: movq <t13_stack=80(%rsp),>mulrax=%rax
  2279. movq 80(%rsp),%rax
  2280. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  2281. # asm 1: mul <mulx3=int64#8
  2282. # asm 2: mul <mulx3=%r10
  2283. mul %r10
  2284. # qhasm: carry? mulr6 += mulrax
  2285. # asm 1: add <mulrax=int64#7,<mulr6=int64#5
  2286. # asm 2: add <mulrax=%rax,<mulr6=%r8
  2287. add %rax,%r8
  2288. # qhasm: mulrdx += 0 + carry
  2289. # asm 1: adc $0,<mulrdx=int64#3
  2290. # asm 2: adc $0,<mulrdx=%rdx
  2291. adc $0,%rdx
  2292. # qhasm: carry? mulr6 += mulc
  2293. # asm 1: add <mulc=int64#13,<mulr6=int64#5
  2294. # asm 2: add <mulc=%r15,<mulr6=%r8
  2295. add %r15,%r8
  2296. # qhasm: mulr7 += mulrdx + carry
  2297. # asm 1: adc <mulrdx=int64#3,<mulr7=int64#6
  2298. # asm 2: adc <mulrdx=%rdx,<mulr7=%r9
  2299. adc %rdx,%r9
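# note: the full 512-bit product now sits in t80..t83 (low four limbs) and
# mulr4..mulr7 (high four limbs).  The block below folds the high half back
# into the low half via the constant 38, because
# 2^256 = 2*(2^255 - 19) + 38, so 2^256 mod (2^255 - 19) = 38.
# Sketch (C-like pseudocode, not generated output):
#   (r4,r5,r6,r7,r8) = 38 * (mulr4,mulr5,mulr6,mulr7);
#   (t80,t81,t82,t83) += (r4,r5,r6,r7);   /* r8 and carries folded below */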
  2300. # qhasm: mulrax = mulr4
  2301. # asm 1: mov <mulr4=int64#2,>mulrax=int64#7
  2302. # asm 2: mov <mulr4=%rsi,>mulrax=%rax
  2303. mov %rsi,%rax
  2304. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  2305. mulq crypto_scalarmult_curve25519_amd64_64_38
  2306. # qhasm: mulr4 = mulrax
  2307. # asm 1: mov <mulrax=int64#7,>mulr4=int64#2
  2308. # asm 2: mov <mulrax=%rax,>mulr4=%rsi
  2309. mov %rax,%rsi
  2310. # qhasm: mulrax = mulr5
  2311. # asm 1: mov <mulr5=int64#4,>mulrax=int64#7
  2312. # asm 2: mov <mulr5=%rcx,>mulrax=%rax
  2313. mov %rcx,%rax
  2314. # qhasm: mulr5 = mulrdx
  2315. # asm 1: mov <mulrdx=int64#3,>mulr5=int64#4
  2316. # asm 2: mov <mulrdx=%rdx,>mulr5=%rcx
  2317. mov %rdx,%rcx
  2318. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  2319. mulq crypto_scalarmult_curve25519_amd64_64_38
  2320. # qhasm: carry? mulr5 += mulrax
  2321. # asm 1: add <mulrax=int64#7,<mulr5=int64#4
  2322. # asm 2: add <mulrax=%rax,<mulr5=%rcx
  2323. add %rax,%rcx
  2324. # qhasm: mulrax = mulr6
  2325. # asm 1: mov <mulr6=int64#5,>mulrax=int64#7
  2326. # asm 2: mov <mulr6=%r8,>mulrax=%rax
  2327. mov %r8,%rax
  2328. # qhasm: mulr6 = 0
  2329. # asm 1: mov $0,>mulr6=int64#5
  2330. # asm 2: mov $0,>mulr6=%r8
  2331. mov $0,%r8
  2332. # qhasm: mulr6 += mulrdx + carry
  2333. # asm 1: adc <mulrdx=int64#3,<mulr6=int64#5
  2334. # asm 2: adc <mulrdx=%rdx,<mulr6=%r8
  2335. adc %rdx,%r8
  2336. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  2337. mulq crypto_scalarmult_curve25519_amd64_64_38
  2338. # qhasm: carry? mulr6 += mulrax
  2339. # asm 1: add <mulrax=int64#7,<mulr6=int64#5
  2340. # asm 2: add <mulrax=%rax,<mulr6=%r8
  2341. add %rax,%r8
  2342. # qhasm: mulrax = mulr7
  2343. # asm 1: mov <mulr7=int64#6,>mulrax=int64#7
  2344. # asm 2: mov <mulr7=%r9,>mulrax=%rax
  2345. mov %r9,%rax
  2346. # qhasm: mulr7 = 0
  2347. # asm 1: mov $0,>mulr7=int64#6
  2348. # asm 2: mov $0,>mulr7=%r9
  2349. mov $0,%r9
  2350. # qhasm: mulr7 += mulrdx + carry
  2351. # asm 1: adc <mulrdx=int64#3,<mulr7=int64#6
  2352. # asm 2: adc <mulrdx=%rdx,<mulr7=%r9
  2353. adc %rdx,%r9
  2354. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  2355. mulq crypto_scalarmult_curve25519_amd64_64_38
  2356. # qhasm: carry? mulr7 += mulrax
  2357. # asm 1: add <mulrax=int64#7,<mulr7=int64#6
  2358. # asm 2: add <mulrax=%rax,<mulr7=%r9
  2359. add %rax,%r9
  2360. # qhasm: mulr8 = 0
  2361. # asm 1: mov $0,>mulr8=int64#7
  2362. # asm 2: mov $0,>mulr8=%rax
  2363. mov $0,%rax
  2364. # qhasm: mulr8 += mulrdx + carry
  2365. # asm 1: adc <mulrdx=int64#3,<mulr8=int64#7
  2366. # asm 2: adc <mulrdx=%rdx,<mulr8=%rax
  2367. adc %rdx,%rax
  2368. # qhasm: carry? t80 += mulr4
  2369. # asm 1: add <mulr4=int64#2,<t80=int64#9
  2370. # asm 2: add <mulr4=%rsi,<t80=%r11
  2371. add %rsi,%r11
  2372. # qhasm: carry? t81 += mulr5 + carry
  2373. # asm 1: adc <mulr5=int64#4,<t81=int64#10
  2374. # asm 2: adc <mulr5=%rcx,<t81=%r12
  2375. adc %rcx,%r12
  2376. # qhasm: carry? t82 += mulr6 + carry
  2377. # asm 1: adc <mulr6=int64#5,<t82=int64#11
  2378. # asm 2: adc <mulr6=%r8,<t82=%r13
  2379. adc %r8,%r13
  2380. # qhasm: carry? t83 += mulr7 + carry
  2381. # asm 1: adc <mulr7=int64#6,<t83=int64#12
  2382. # asm 2: adc <mulr7=%r9,<t83=%r14
  2383. adc %r9,%r14
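# note: mulr8 absorbs the carry out of the limb additions above; it is then
# multiplied by 38 and added into t80 with the carry propagated through
# t81..t83, and any carry out of that propagation is folded once more via
# another *38 into t80.  The result fits in four limbs but is only weakly
# reduced (it may still be >= 2^255 - 19).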
  2384. # qhasm: mulzero = 0
  2385. # asm 1: mov $0,>mulzero=int64#2
  2386. # asm 2: mov $0,>mulzero=%rsi
  2387. mov $0,%rsi
  2388. # qhasm: mulr8 += mulzero + carry
  2389. # asm 1: adc <mulzero=int64#2,<mulr8=int64#7
  2390. # asm 2: adc <mulzero=%rsi,<mulr8=%rax
  2391. adc %rsi,%rax
  2392. # qhasm: mulr8 *= 38
  2393. # asm 1: imulq $38,<mulr8=int64#7,>mulr8=int64#3
  2394. # asm 2: imulq $38,<mulr8=%rax,>mulr8=%rdx
  2395. imulq $38,%rax,%rdx
  2396. # qhasm: carry? t80 += mulr8
  2397. # asm 1: add <mulr8=int64#3,<t80=int64#9
  2398. # asm 2: add <mulr8=%rdx,<t80=%r11
  2399. add %rdx,%r11
  2400. # qhasm: carry? t81 += mulzero + carry
  2401. # asm 1: adc <mulzero=int64#2,<t81=int64#10
  2402. # asm 2: adc <mulzero=%rsi,<t81=%r12
  2403. adc %rsi,%r12
  2404. # qhasm: carry? t82 += mulzero + carry
  2405. # asm 1: adc <mulzero=int64#2,<t82=int64#11
  2406. # asm 2: adc <mulzero=%rsi,<t82=%r13
  2407. adc %rsi,%r13
  2408. # qhasm: carry? t83 += mulzero + carry
  2409. # asm 1: adc <mulzero=int64#2,<t83=int64#12
  2410. # asm 2: adc <mulzero=%rsi,<t83=%r14
  2411. adc %rsi,%r14
  2412. # qhasm: mulzero += mulzero + carry
  2413. # asm 1: adc <mulzero=int64#2,<mulzero=int64#2
  2414. # asm 2: adc <mulzero=%rsi,<mulzero=%rsi
  2415. adc %rsi,%rsi
  2416. # qhasm: mulzero *= 38
  2417. # asm 1: imulq $38,<mulzero=int64#2,>mulzero=int64#2
  2418. # asm 2: imulq $38,<mulzero=%rsi,>mulzero=%rsi
  2419. imulq $38,%rsi,%rsi
  2420. # qhasm: t80 += mulzero
  2421. # asm 1: add <mulzero=int64#2,<t80=int64#9
  2422. # asm 2: add <mulzero=%rsi,<t80=%r11
  2423. add %rsi,%r11
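# note: t80..t83 now hold the reduced product.  The code below forms both the
# difference (zq = t8 - t9, with t9 taken from t90_stack..t93_stack) and the
# sum (t8 += t9), each followed by a branch-free mod-(2^255 - 19) correction
# using the constant 38.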
  2424. # qhasm: zq0 = t80
  2425. # asm 1: mov <t80=int64#9,>zq0=int64#2
  2426. # asm 2: mov <t80=%r11,>zq0=%rsi
  2427. mov %r11,%rsi
  2428. # qhasm: zq1 = t81
  2429. # asm 1: mov <t81=int64#10,>zq1=int64#3
  2430. # asm 2: mov <t81=%r12,>zq1=%rdx
  2431. mov %r12,%rdx
  2432. # qhasm: zq2 = t82
  2433. # asm 1: mov <t82=int64#11,>zq2=int64#4
  2434. # asm 2: mov <t82=%r13,>zq2=%rcx
  2435. mov %r13,%rcx
  2436. # qhasm: zq3 = t83
  2437. # asm 1: mov <t83=int64#12,>zq3=int64#5
  2438. # asm 2: mov <t83=%r14,>zq3=%r8
  2439. mov %r14,%r8
  2440. # qhasm: carry? zq0 -= t90_stack
  2441. # asm 1: subq <t90_stack=stack64#12,<zq0=int64#2
  2442. # asm 2: subq <t90_stack=88(%rsp),<zq0=%rsi
  2443. subq 88(%rsp),%rsi
  2444. # qhasm: carry? zq1 -= t91_stack - carry
  2445. # asm 1: sbbq <t91_stack=stack64#13,<zq1=int64#3
  2446. # asm 2: sbbq <t91_stack=96(%rsp),<zq1=%rdx
  2447. sbbq 96(%rsp),%rdx
  2448. # qhasm: carry? zq2 -= t92_stack - carry
  2449. # asm 1: sbbq <t92_stack=stack64#14,<zq2=int64#4
  2450. # asm 2: sbbq <t92_stack=104(%rsp),<zq2=%rcx
  2451. sbbq 104(%rsp),%rcx
  2452. # qhasm: carry? zq3 -= t93_stack - carry
  2453. # asm 1: sbbq <t93_stack=stack64#15,<zq3=int64#5
  2454. # asm 2: sbbq <t93_stack=112(%rsp),<zq3=%r8
  2455. sbbq 112(%rsp),%r8
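# note: the subq/sbbq chain above may borrow out of the top limb.  Since
# 2^256 mod (2^255 - 19) = 38, a borrow is corrected by subtracting a further
# 38; cmovae selects 38 or 0 without branching (constant time), and a second
# conditional subtraction of 38 from the low limb covers the case where the
# first correction itself borrows (the low limb is then large enough that no
# new borrow can occur).  Sketch:
#   t = borrow ? 38 : 0;  zq  -= t;   /* with borrow propagation */
#   t = borrow ? 38 : 0;  zq0 -= t;   /* low limb only */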
  2456. # qhasm: subt0 = 0
  2457. # asm 1: mov $0,>subt0=int64#6
  2458. # asm 2: mov $0,>subt0=%r9
  2459. mov $0,%r9
  2460. # qhasm: subt1 = 38
  2461. # asm 1: mov $38,>subt1=int64#7
  2462. # asm 2: mov $38,>subt1=%rax
  2463. mov $38,%rax
  2464. # qhasm: subt1 = subt0 if !carry
  2465. # asm 1: cmovae <subt0=int64#6,<subt1=int64#7
  2466. # asm 2: cmovae <subt0=%r9,<subt1=%rax
  2467. cmovae %r9,%rax
  2468. # qhasm: carry? zq0 -= subt1
  2469. # asm 1: sub <subt1=int64#7,<zq0=int64#2
  2470. # asm 2: sub <subt1=%rax,<zq0=%rsi
  2471. sub %rax,%rsi
  2472. # qhasm: carry? zq1 -= subt0 - carry
  2473. # asm 1: sbb <subt0=int64#6,<zq1=int64#3
  2474. # asm 2: sbb <subt0=%r9,<zq1=%rdx
  2475. sbb %r9,%rdx
  2476. # qhasm: carry? zq2 -= subt0 - carry
  2477. # asm 1: sbb <subt0=int64#6,<zq2=int64#4
  2478. # asm 2: sbb <subt0=%r9,<zq2=%rcx
  2479. sbb %r9,%rcx
  2480. # qhasm: carry? zq3 -= subt0 - carry
  2481. # asm 1: sbb <subt0=int64#6,<zq3=int64#5
  2482. # asm 2: sbb <subt0=%r9,<zq3=%r8
  2483. sbb %r9,%r8
  2484. # qhasm: subt0 = subt1 if carry
  2485. # asm 1: cmovc <subt1=int64#7,<subt0=int64#6
  2486. # asm 2: cmovc <subt1=%rax,<subt0=%r9
  2487. cmovc %rax,%r9
  2488. # qhasm: zq0 -= subt0
  2489. # asm 1: sub <subt0=int64#6,<zq0=int64#2
  2490. # asm 2: sub <subt0=%r9,<zq0=%rsi
  2491. sub %r9,%rsi
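# note: zq0..zq3 now hold the corrected difference.  Next t80..t83 is reused
# for the sum: the four stack words t90_stack..t93_stack are added back in,
# followed by the analogous carry correction.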
  2492. # qhasm: carry? t80 += t90_stack
  2493. # asm 1: addq <t90_stack=stack64#12,<t80=int64#9
  2494. # asm 2: addq <t90_stack=88(%rsp),<t80=%r11
  2495. addq 88(%rsp),%r11
  2496. # qhasm: carry? t81 += t91_stack + carry
  2497. # asm 1: adcq <t91_stack=stack64#13,<t81=int64#10
  2498. # asm 2: adcq <t91_stack=96(%rsp),<t81=%r12
  2499. adcq 96(%rsp),%r12
  2500. # qhasm: carry? t82 += t92_stack + carry
  2501. # asm 1: adcq <t92_stack=stack64#14,<t82=int64#11
  2502. # asm 2: adcq <t92_stack=104(%rsp),<t82=%r13
  2503. adcq 104(%rsp),%r13
  2504. # qhasm: carry? t83 += t93_stack + carry
  2505. # asm 1: adcq <t93_stack=stack64#15,<t83=int64#12
  2506. # asm 2: adcq <t93_stack=112(%rsp),<t83=%r14
  2507. adcq 112(%rsp),%r14
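# note: a carry out of the 256-bit addition above stands for an extra 2^256,
# which is congruent to 38 mod 2^255 - 19, so the cmovae/add sequence below
# adds 38 (or 0) without branching; a second conditional add of 38 absorbs a
# carry produced by the correction itself.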
  2508. # qhasm: addt0 = 0
  2509. # asm 1: mov $0,>addt0=int64#6
  2510. # asm 2: mov $0,>addt0=%r9
  2511. mov $0,%r9
  2512. # qhasm: addt1 = 38
  2513. # asm 1: mov $38,>addt1=int64#7
  2514. # asm 2: mov $38,>addt1=%rax
  2515. mov $38,%rax
  2516. # qhasm: addt1 = addt0 if !carry
  2517. # asm 1: cmovae <addt0=int64#6,<addt1=int64#7
  2518. # asm 2: cmovae <addt0=%r9,<addt1=%rax
  2519. cmovae %r9,%rax
  2520. # qhasm: carry? t80 += addt1
  2521. # asm 1: add <addt1=int64#7,<t80=int64#9
  2522. # asm 2: add <addt1=%rax,<t80=%r11
  2523. add %rax,%r11
  2524. # qhasm: carry? t81 += addt0 + carry
  2525. # asm 1: adc <addt0=int64#6,<t81=int64#10
  2526. # asm 2: adc <addt0=%r9,<t81=%r12
  2527. adc %r9,%r12
  2528. # qhasm: carry? t82 += addt0 + carry
  2529. # asm 1: adc <addt0=int64#6,<t82=int64#11
  2530. # asm 2: adc <addt0=%r9,<t82=%r13
  2531. adc %r9,%r13
  2532. # qhasm: carry? t83 += addt0 + carry
  2533. # asm 1: adc <addt0=int64#6,<t83=int64#12
  2534. # asm 2: adc <addt0=%r9,<t83=%r14
  2535. adc %r9,%r14
  2536. # qhasm: addt0 = addt1 if carry
  2537. # asm 1: cmovc <addt1=int64#7,<addt0=int64#6
  2538. # asm 2: cmovc <addt1=%rax,<addt0=%r9
  2539. cmovc %rax,%r9
  2540. # qhasm: t80 += addt0
  2541. # asm 1: add <addt0=int64#6,<t80=int64#9
  2542. # asm 2: add <addt0=%r9,<t80=%r11
  2543. add %r9,%r11
  2544. # qhasm: *(uint64 *)(workp + 96) = t80
  2545. # asm 1: movq <t80=int64#9,96(<workp=int64#1)
  2546. # asm 2: movq <t80=%r11,96(<workp=%rdi)
  2547. movq %r11,96(%rdi)
  2548. # qhasm: *(uint64 *)(workp + 104) = t81
  2549. # asm 1: movq <t81=int64#10,104(<workp=int64#1)
  2550. # asm 2: movq <t81=%r12,104(<workp=%rdi)
  2551. movq %r12,104(%rdi)
  2552. # qhasm: *(uint64 *)(workp + 112) = t82
  2553. # asm 1: movq <t82=int64#11,112(<workp=int64#1)
  2554. # asm 2: movq <t82=%r13,112(<workp=%rdi)
  2555. movq %r13,112(%rdi)
  2556. # qhasm: *(uint64 *)(workp + 120) = t83
  2557. # asm 1: movq <t83=int64#12,120(<workp=int64#1)
  2558. # asm 2: movq <t83=%r14,120(<workp=%rdi)
  2559. movq %r14,120(%rdi)
  2560. # qhasm: *(uint64 *)(workp + 128) = zq0
  2561. # asm 1: movq <zq0=int64#2,128(<workp=int64#1)
  2562. # asm 2: movq <zq0=%rsi,128(<workp=%rdi)
  2563. movq %rsi,128(%rdi)
  2564. # qhasm: *(uint64 *)(workp + 136) = zq1
  2565. # asm 1: movq <zq1=int64#3,136(<workp=int64#1)
  2566. # asm 2: movq <zq1=%rdx,136(<workp=%rdi)
  2567. movq %rdx,136(%rdi)
  2568. # qhasm: *(uint64 *)(workp + 144) = zq2
  2569. # asm 1: movq <zq2=int64#4,144(<workp=int64#1)
  2570. # asm 2: movq <zq2=%rcx,144(<workp=%rdi)
  2571. movq %rcx,144(%rdi)
  2572. # qhasm: *(uint64 *)(workp + 152) = zq3
  2573. # asm 1: movq <zq3=int64#5,152(<workp=int64#1)
  2574. # asm 2: movq <zq3=%r8,152(<workp=%rdi)
  2575. movq %r8,152(%rdi)
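# note: the sum has been stored to workp+96..120 and the difference to
# workp+128..152.  The next block squares the four limbs at workp+96..120
# using schoolbook squaring: the six off-diagonal products a_i*a_j (i > j)
# are accumulated first, doubled, then the four diagonal squares are added,
# and finally the high half is folded down by 38 as before.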
  2576. # qhasm: squarer7 = 0
  2577. # asm 1: mov $0,>squarer7=int64#2
  2578. # asm 2: mov $0,>squarer7=%rsi
  2579. mov $0,%rsi
  2580. # qhasm: squarerax = *(uint64 *)(workp + 104)
  2581. # asm 1: movq 104(<workp=int64#1),>squarerax=int64#7
  2582. # asm 2: movq 104(<workp=%rdi),>squarerax=%rax
  2583. movq 104(%rdi),%rax
  2584. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 96)
  2585. # asm 1: mulq 96(<workp=int64#1)
  2586. # asm 2: mulq 96(<workp=%rdi)
  2587. mulq 96(%rdi)
  2588. # qhasm: xq1 = squarerax
  2589. # asm 1: mov <squarerax=int64#7,>xq1=int64#4
  2590. # asm 2: mov <squarerax=%rax,>xq1=%rcx
  2591. mov %rax,%rcx
  2592. # qhasm: xq2 = squarerdx
  2593. # asm 1: mov <squarerdx=int64#3,>xq2=int64#5
  2594. # asm 2: mov <squarerdx=%rdx,>xq2=%r8
  2595. mov %rdx,%r8
  2596. # qhasm: squarerax = *(uint64 *)(workp + 112)
  2597. # asm 1: movq 112(<workp=int64#1),>squarerax=int64#7
  2598. # asm 2: movq 112(<workp=%rdi),>squarerax=%rax
  2599. movq 112(%rdi),%rax
  2600. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 104)
  2601. # asm 1: mulq 104(<workp=int64#1)
  2602. # asm 2: mulq 104(<workp=%rdi)
  2603. mulq 104(%rdi)
  2604. # qhasm: xq3 = squarerax
  2605. # asm 1: mov <squarerax=int64#7,>xq3=int64#6
  2606. # asm 2: mov <squarerax=%rax,>xq3=%r9
  2607. mov %rax,%r9
  2608. # qhasm: squarer4 = squarerdx
  2609. # asm 1: mov <squarerdx=int64#3,>squarer4=int64#8
  2610. # asm 2: mov <squarerdx=%rdx,>squarer4=%r10
  2611. mov %rdx,%r10
  2612. # qhasm: squarerax = *(uint64 *)(workp + 120)
  2613. # asm 1: movq 120(<workp=int64#1),>squarerax=int64#7
  2614. # asm 2: movq 120(<workp=%rdi),>squarerax=%rax
  2615. movq 120(%rdi),%rax
  2616. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 112)
  2617. # asm 1: mulq 112(<workp=int64#1)
  2618. # asm 2: mulq 112(<workp=%rdi)
  2619. mulq 112(%rdi)
  2620. # qhasm: squarer5 = squarerax
  2621. # asm 1: mov <squarerax=int64#7,>squarer5=int64#9
  2622. # asm 2: mov <squarerax=%rax,>squarer5=%r11
  2623. mov %rax,%r11
  2624. # qhasm: squarer6 = squarerdx
  2625. # asm 1: mov <squarerdx=int64#3,>squarer6=int64#10
  2626. # asm 2: mov <squarerdx=%rdx,>squarer6=%r12
  2627. mov %rdx,%r12
  2628. # qhasm: squarerax = *(uint64 *)(workp + 112)
  2629. # asm 1: movq 112(<workp=int64#1),>squarerax=int64#7
  2630. # asm 2: movq 112(<workp=%rdi),>squarerax=%rax
  2631. movq 112(%rdi),%rax
  2632. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 96)
  2633. # asm 1: mulq 96(<workp=int64#1)
  2634. # asm 2: mulq 96(<workp=%rdi)
  2635. mulq 96(%rdi)
  2636. # qhasm: carry? xq2 += squarerax
  2637. # asm 1: add <squarerax=int64#7,<xq2=int64#5
  2638. # asm 2: add <squarerax=%rax,<xq2=%r8
  2639. add %rax,%r8
  2640. # qhasm: carry? xq3 += squarerdx + carry
  2641. # asm 1: adc <squarerdx=int64#3,<xq3=int64#6
  2642. # asm 2: adc <squarerdx=%rdx,<xq3=%r9
  2643. adc %rdx,%r9
  2644. # qhasm: squarer4 += 0 + carry
  2645. # asm 1: adc $0,<squarer4=int64#8
  2646. # asm 2: adc $0,<squarer4=%r10
  2647. adc $0,%r10
  2648. # qhasm: squarerax = *(uint64 *)(workp + 120)
  2649. # asm 1: movq 120(<workp=int64#1),>squarerax=int64#7
  2650. # asm 2: movq 120(<workp=%rdi),>squarerax=%rax
  2651. movq 120(%rdi),%rax
  2652. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 104)
  2653. # asm 1: mulq 104(<workp=int64#1)
  2654. # asm 2: mulq 104(<workp=%rdi)
  2655. mulq 104(%rdi)
  2656. # qhasm: carry? squarer4 += squarerax
  2657. # asm 1: add <squarerax=int64#7,<squarer4=int64#8
  2658. # asm 2: add <squarerax=%rax,<squarer4=%r10
  2659. add %rax,%r10
  2660. # qhasm: carry? squarer5 += squarerdx + carry
  2661. # asm 1: adc <squarerdx=int64#3,<squarer5=int64#9
  2662. # asm 2: adc <squarerdx=%rdx,<squarer5=%r11
  2663. adc %rdx,%r11
  2664. # qhasm: squarer6 += 0 + carry
  2665. # asm 1: adc $0,<squarer6=int64#10
  2666. # asm 2: adc $0,<squarer6=%r12
  2667. adc $0,%r12
  2668. # qhasm: squarerax = *(uint64 *)(workp + 120)
  2669. # asm 1: movq 120(<workp=int64#1),>squarerax=int64#7
  2670. # asm 2: movq 120(<workp=%rdi),>squarerax=%rax
  2671. movq 120(%rdi),%rax
  2672. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 96)
  2673. # asm 1: mulq 96(<workp=int64#1)
  2674. # asm 2: mulq 96(<workp=%rdi)
  2675. mulq 96(%rdi)
  2676. # qhasm: carry? xq3 += squarerax
  2677. # asm 1: add <squarerax=int64#7,<xq3=int64#6
  2678. # asm 2: add <squarerax=%rax,<xq3=%r9
  2679. add %rax,%r9
  2680. # qhasm: carry? squarer4 += squarerdx + carry
  2681. # asm 1: adc <squarerdx=int64#3,<squarer4=int64#8
  2682. # asm 2: adc <squarerdx=%rdx,<squarer4=%r10
  2683. adc %rdx,%r10
  2684. # qhasm: carry? squarer5 += 0 + carry
  2685. # asm 1: adc $0,<squarer5=int64#9
  2686. # asm 2: adc $0,<squarer5=%r11
  2687. adc $0,%r11
  2688. # qhasm: carry? squarer6 += 0 + carry
  2689. # asm 1: adc $0,<squarer6=int64#10
  2690. # asm 2: adc $0,<squarer6=%r12
  2691. adc $0,%r12
  2692. # qhasm: squarer7 += 0 + carry
  2693. # asm 1: adc $0,<squarer7=int64#2
  2694. # asm 2: adc $0,<squarer7=%rsi
  2695. adc $0,%rsi
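# note: xq1..xq3 and squarer4..squarer7 now hold the accumulated off-diagonal
# products; they are doubled below, since each a_i*a_j with i != j appears
# twice in the square.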
  2696. # qhasm: carry? xq1 += xq1
  2697. # asm 1: add <xq1=int64#4,<xq1=int64#4
  2698. # asm 2: add <xq1=%rcx,<xq1=%rcx
  2699. add %rcx,%rcx
  2700. # qhasm: carry? xq2 += xq2 + carry
  2701. # asm 1: adc <xq2=int64#5,<xq2=int64#5
  2702. # asm 2: adc <xq2=%r8,<xq2=%r8
  2703. adc %r8,%r8
  2704. # qhasm: carry? xq3 += xq3 + carry
  2705. # asm 1: adc <xq3=int64#6,<xq3=int64#6
  2706. # asm 2: adc <xq3=%r9,<xq3=%r9
  2707. adc %r9,%r9
  2708. # qhasm: carry? squarer4 += squarer4 + carry
  2709. # asm 1: adc <squarer4=int64#8,<squarer4=int64#8
  2710. # asm 2: adc <squarer4=%r10,<squarer4=%r10
  2711. adc %r10,%r10
  2712. # qhasm: carry? squarer5 += squarer5 + carry
  2713. # asm 1: adc <squarer5=int64#9,<squarer5=int64#9
  2714. # asm 2: adc <squarer5=%r11,<squarer5=%r11
  2715. adc %r11,%r11
  2716. # qhasm: carry? squarer6 += squarer6 + carry
  2717. # asm 1: adc <squarer6=int64#10,<squarer6=int64#10
  2718. # asm 2: adc <squarer6=%r12,<squarer6=%r12
  2719. adc %r12,%r12
  2720. # qhasm: squarer7 += squarer7 + carry
  2721. # asm 1: adc <squarer7=int64#2,<squarer7=int64#2
  2722. # asm 2: adc <squarer7=%rsi,<squarer7=%rsi
  2723. adc %rsi,%rsi
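# note: the four diagonal squares of the limbs at workp+96..120 are computed
# next and added into the doubled off-diagonal terms, completing the 512-bit
# square in xq0..xq3 (low) and squarer4..squarer7 (high).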
  2724. # qhasm: squarerax = *(uint64 *)(workp + 96)
  2725. # asm 1: movq 96(<workp=int64#1),>squarerax=int64#7
  2726. # asm 2: movq 96(<workp=%rdi),>squarerax=%rax
  2727. movq 96(%rdi),%rax
  2728. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 96)
  2729. # asm 1: mulq 96(<workp=int64#1)
  2730. # asm 2: mulq 96(<workp=%rdi)
  2731. mulq 96(%rdi)
  2732. # qhasm: xq0 = squarerax
  2733. # asm 1: mov <squarerax=int64#7,>xq0=int64#11
  2734. # asm 2: mov <squarerax=%rax,>xq0=%r13
  2735. mov %rax,%r13
  2736. # qhasm: squaret1 = squarerdx
  2737. # asm 1: mov <squarerdx=int64#3,>squaret1=int64#12
  2738. # asm 2: mov <squarerdx=%rdx,>squaret1=%r14
  2739. mov %rdx,%r14
  2740. # qhasm: squarerax = *(uint64 *)(workp + 104)
  2741. # asm 1: movq 104(<workp=int64#1),>squarerax=int64#7
  2742. # asm 2: movq 104(<workp=%rdi),>squarerax=%rax
  2743. movq 104(%rdi),%rax
  2744. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 104)
  2745. # asm 1: mulq 104(<workp=int64#1)
  2746. # asm 2: mulq 104(<workp=%rdi)
  2747. mulq 104(%rdi)
  2748. # qhasm: squaret2 = squarerax
  2749. # asm 1: mov <squarerax=int64#7,>squaret2=int64#13
  2750. # asm 2: mov <squarerax=%rax,>squaret2=%r15
  2751. mov %rax,%r15
  2752. # qhasm: squaret3 = squarerdx
  2753. # asm 1: mov <squarerdx=int64#3,>squaret3=int64#14
  2754. # asm 2: mov <squarerdx=%rdx,>squaret3=%rbx
  2755. mov %rdx,%rbx
  2756. # qhasm: squarerax = *(uint64 *)(workp + 112)
  2757. # asm 1: movq 112(<workp=int64#1),>squarerax=int64#7
  2758. # asm 2: movq 112(<workp=%rdi),>squarerax=%rax
  2759. movq 112(%rdi),%rax
  2760. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 112)
  2761. # asm 1: mulq 112(<workp=int64#1)
  2762. # asm 2: mulq 112(<workp=%rdi)
  2763. mulq 112(%rdi)
  2764. # qhasm: carry? xq1 += squaret1
  2765. # asm 1: add <squaret1=int64#12,<xq1=int64#4
  2766. # asm 2: add <squaret1=%r14,<xq1=%rcx
  2767. add %r14,%rcx
  2768. # qhasm: carry? xq2 += squaret2 + carry
  2769. # asm 1: adc <squaret2=int64#13,<xq2=int64#5
  2770. # asm 2: adc <squaret2=%r15,<xq2=%r8
  2771. adc %r15,%r8
  2772. # qhasm: carry? xq3 += squaret3 + carry
  2773. # asm 1: adc <squaret3=int64#14,<xq3=int64#6
  2774. # asm 2: adc <squaret3=%rbx,<xq3=%r9
  2775. adc %rbx,%r9
  2776. # qhasm: carry? squarer4 += squarerax + carry
  2777. # asm 1: adc <squarerax=int64#7,<squarer4=int64#8
  2778. # asm 2: adc <squarerax=%rax,<squarer4=%r10
  2779. adc %rax,%r10
  2780. # qhasm: carry? squarer5 += squarerdx + carry
  2781. # asm 1: adc <squarerdx=int64#3,<squarer5=int64#9
  2782. # asm 2: adc <squarerdx=%rdx,<squarer5=%r11
  2783. adc %rdx,%r11
  2784. # qhasm: carry? squarer6 += 0 + carry
  2785. # asm 1: adc $0,<squarer6=int64#10
  2786. # asm 2: adc $0,<squarer6=%r12
  2787. adc $0,%r12
  2788. # qhasm: squarer7 += 0 + carry
  2789. # asm 1: adc $0,<squarer7=int64#2
  2790. # asm 2: adc $0,<squarer7=%rsi
  2791. adc $0,%rsi
  2792. # qhasm: squarerax = *(uint64 *)(workp + 120)
  2793. # asm 1: movq 120(<workp=int64#1),>squarerax=int64#7
  2794. # asm 2: movq 120(<workp=%rdi),>squarerax=%rax
  2795. movq 120(%rdi),%rax
  2796. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 120)
  2797. # asm 1: mulq 120(<workp=int64#1)
  2798. # asm 2: mulq 120(<workp=%rdi)
  2799. mulq 120(%rdi)
  2800. # qhasm: carry? squarer6 += squarerax
  2801. # asm 1: add <squarerax=int64#7,<squarer6=int64#10
  2802. # asm 2: add <squarerax=%rax,<squarer6=%r12
  2803. add %rax,%r12
  2804. # qhasm: squarer7 += squarerdx + carry
  2805. # asm 1: adc <squarerdx=int64#3,<squarer7=int64#2
  2806. # asm 2: adc <squarerdx=%rdx,<squarer7=%rsi
  2807. adc %rdx,%rsi
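# note: as in the multiplication above, the high limbs squarer4..squarer7 are
# multiplied by 38 and folded into xq0..xq3, with squarer8 catching the final
# carry, which is folded once more before the result is stored.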
  2808. # qhasm: squarerax = squarer4
  2809. # asm 1: mov <squarer4=int64#8,>squarerax=int64#7
  2810. # asm 2: mov <squarer4=%r10,>squarerax=%rax
  2811. mov %r10,%rax
  2812. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  2813. mulq crypto_scalarmult_curve25519_amd64_64_38
  2814. # qhasm: squarer4 = squarerax
  2815. # asm 1: mov <squarerax=int64#7,>squarer4=int64#8
  2816. # asm 2: mov <squarerax=%rax,>squarer4=%r10
  2817. mov %rax,%r10
  2818. # qhasm: squarerax = squarer5
  2819. # asm 1: mov <squarer5=int64#9,>squarerax=int64#7
  2820. # asm 2: mov <squarer5=%r11,>squarerax=%rax
  2821. mov %r11,%rax
  2822. # qhasm: squarer5 = squarerdx
  2823. # asm 1: mov <squarerdx=int64#3,>squarer5=int64#9
  2824. # asm 2: mov <squarerdx=%rdx,>squarer5=%r11
  2825. mov %rdx,%r11
  2826. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  2827. mulq crypto_scalarmult_curve25519_amd64_64_38
  2828. # qhasm: carry? squarer5 += squarerax
  2829. # asm 1: add <squarerax=int64#7,<squarer5=int64#9
  2830. # asm 2: add <squarerax=%rax,<squarer5=%r11
  2831. add %rax,%r11
  2832. # qhasm: squarerax = squarer6
  2833. # asm 1: mov <squarer6=int64#10,>squarerax=int64#7
  2834. # asm 2: mov <squarer6=%r12,>squarerax=%rax
  2835. mov %r12,%rax
  2836. # qhasm: squarer6 = 0
  2837. # asm 1: mov $0,>squarer6=int64#10
  2838. # asm 2: mov $0,>squarer6=%r12
  2839. mov $0,%r12
  2840. # qhasm: squarer6 += squarerdx + carry
  2841. # asm 1: adc <squarerdx=int64#3,<squarer6=int64#10
  2842. # asm 2: adc <squarerdx=%rdx,<squarer6=%r12
  2843. adc %rdx,%r12
  2844. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  2845. mulq crypto_scalarmult_curve25519_amd64_64_38
  2846. # qhasm: carry? squarer6 += squarerax
  2847. # asm 1: add <squarerax=int64#7,<squarer6=int64#10
  2848. # asm 2: add <squarerax=%rax,<squarer6=%r12
  2849. add %rax,%r12
  2850. # qhasm: squarerax = squarer7
  2851. # asm 1: mov <squarer7=int64#2,>squarerax=int64#7
  2852. # asm 2: mov <squarer7=%rsi,>squarerax=%rax
  2853. mov %rsi,%rax
  2854. # qhasm: squarer7 = 0
  2855. # asm 1: mov $0,>squarer7=int64#2
  2856. # asm 2: mov $0,>squarer7=%rsi
  2857. mov $0,%rsi
  2858. # qhasm: squarer7 += squarerdx + carry
  2859. # asm 1: adc <squarerdx=int64#3,<squarer7=int64#2
  2860. # asm 2: adc <squarerdx=%rdx,<squarer7=%rsi
  2861. adc %rdx,%rsi
  2862. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  2863. mulq crypto_scalarmult_curve25519_amd64_64_38
  2864. # qhasm: carry? squarer7 += squarerax
  2865. # asm 1: add <squarerax=int64#7,<squarer7=int64#2
  2866. # asm 2: add <squarerax=%rax,<squarer7=%rsi
  2867. add %rax,%rsi
  2868. # qhasm: squarer8 = 0
  2869. # asm 1: mov $0,>squarer8=int64#7
  2870. # asm 2: mov $0,>squarer8=%rax
  2871. mov $0,%rax
  2872. # qhasm: squarer8 += squarerdx + carry
  2873. # asm 1: adc <squarerdx=int64#3,<squarer8=int64#7
  2874. # asm 2: adc <squarerdx=%rdx,<squarer8=%rax
  2875. adc %rdx,%rax
  2876. # qhasm: carry? xq0 += squarer4
  2877. # asm 1: add <squarer4=int64#8,<xq0=int64#11
  2878. # asm 2: add <squarer4=%r10,<xq0=%r13
  2879. add %r10,%r13
  2880. # qhasm: carry? xq1 += squarer5 + carry
  2881. # asm 1: adc <squarer5=int64#9,<xq1=int64#4
  2882. # asm 2: adc <squarer5=%r11,<xq1=%rcx
  2883. adc %r11,%rcx
  2884. # qhasm: carry? xq2 += squarer6 + carry
  2885. # asm 1: adc <squarer6=int64#10,<xq2=int64#5
  2886. # asm 2: adc <squarer6=%r12,<xq2=%r8
  2887. adc %r12,%r8
  2888. # qhasm: carry? xq3 += squarer7 + carry
  2889. # asm 1: adc <squarer7=int64#2,<xq3=int64#6
  2890. # asm 2: adc <squarer7=%rsi,<xq3=%r9
  2891. adc %rsi,%r9
  2892. # qhasm: squarezero = 0
  2893. # asm 1: mov $0,>squarezero=int64#2
  2894. # asm 2: mov $0,>squarezero=%rsi
  2895. mov $0,%rsi
  2896. # qhasm: squarer8 += squarezero + carry
  2897. # asm 1: adc <squarezero=int64#2,<squarer8=int64#7
  2898. # asm 2: adc <squarezero=%rsi,<squarer8=%rax
  2899. adc %rsi,%rax
  2900. # qhasm: squarer8 *= 38
  2901. # asm 1: imulq $38,<squarer8=int64#7,>squarer8=int64#3
  2902. # asm 2: imulq $38,<squarer8=%rax,>squarer8=%rdx
  2903. imulq $38,%rax,%rdx
  2904. # qhasm: carry? xq0 += squarer8
  2905. # asm 1: add <squarer8=int64#3,<xq0=int64#11
  2906. # asm 2: add <squarer8=%rdx,<xq0=%r13
  2907. add %rdx,%r13
  2908. # qhasm: carry? xq1 += squarezero + carry
  2909. # asm 1: adc <squarezero=int64#2,<xq1=int64#4
  2910. # asm 2: adc <squarezero=%rsi,<xq1=%rcx
  2911. adc %rsi,%rcx
  2912. # qhasm: carry? xq2 += squarezero + carry
  2913. # asm 1: adc <squarezero=int64#2,<xq2=int64#5
  2914. # asm 2: adc <squarezero=%rsi,<xq2=%r8
  2915. adc %rsi,%r8
  2916. # qhasm: carry? xq3 += squarezero + carry
  2917. # asm 1: adc <squarezero=int64#2,<xq3=int64#6
  2918. # asm 2: adc <squarezero=%rsi,<xq3=%r9
  2919. adc %rsi,%r9
  2920. # qhasm: squarezero += squarezero + carry
  2921. # asm 1: adc <squarezero=int64#2,<squarezero=int64#2
  2922. # asm 2: adc <squarezero=%rsi,<squarezero=%rsi
  2923. adc %rsi,%rsi
  2924. # qhasm: squarezero *= 38
  2925. # asm 1: imulq $38,<squarezero=int64#2,>squarezero=int64#2
  2926. # asm 2: imulq $38,<squarezero=%rsi,>squarezero=%rsi
  2927. imulq $38,%rsi,%rsi
  2928. # qhasm: xq0 += squarezero
  2929. # asm 1: add <squarezero=int64#2,<xq0=int64#11
  2930. # asm 2: add <squarezero=%rsi,<xq0=%r13
  2931. add %rsi,%r13
  2932. # qhasm: *(uint64 *)(workp + 96) = xq0
  2933. # asm 1: movq <xq0=int64#11,96(<workp=int64#1)
  2934. # asm 2: movq <xq0=%r13,96(<workp=%rdi)
  2935. movq %r13,96(%rdi)
  2936. # qhasm: *(uint64 *)(workp + 104) = xq1
  2937. # asm 1: movq <xq1=int64#4,104(<workp=int64#1)
  2938. # asm 2: movq <xq1=%rcx,104(<workp=%rdi)
  2939. movq %rcx,104(%rdi)
  2940. # qhasm: *(uint64 *)(workp + 112) = xq2
  2941. # asm 1: movq <xq2=int64#5,112(<workp=int64#1)
  2942. # asm 2: movq <xq2=%r8,112(<workp=%rdi)
  2943. movq %r8,112(%rdi)
  2944. # qhasm: *(uint64 *)(workp + 120) = xq3
  2945. # asm 1: movq <xq3=int64#6,120(<workp=int64#1)
  2946. # asm 2: movq <xq3=%r9,120(<workp=%rdi)
  2947. movq %r9,120(%rdi)
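# note: the reduced square of the sum has been written back to workp+96..120.
# The block below runs the identical squaring routine on the difference held
# at workp+128..152 and stores the result back in place.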
  2948. # qhasm: squarer7 = 0
  2949. # asm 1: mov $0,>squarer7=int64#2
  2950. # asm 2: mov $0,>squarer7=%rsi
  2951. mov $0,%rsi
  2952. # qhasm: squarerax = *(uint64 *)(workp + 136)
  2953. # asm 1: movq 136(<workp=int64#1),>squarerax=int64#7
  2954. # asm 2: movq 136(<workp=%rdi),>squarerax=%rax
  2955. movq 136(%rdi),%rax
  2956. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 128)
  2957. # asm 1: mulq 128(<workp=int64#1)
  2958. # asm 2: mulq 128(<workp=%rdi)
  2959. mulq 128(%rdi)
  2960. # qhasm: zq1 = squarerax
  2961. # asm 1: mov <squarerax=int64#7,>zq1=int64#4
  2962. # asm 2: mov <squarerax=%rax,>zq1=%rcx
  2963. mov %rax,%rcx
  2964. # qhasm: zq2 = squarerdx
  2965. # asm 1: mov <squarerdx=int64#3,>zq2=int64#5
  2966. # asm 2: mov <squarerdx=%rdx,>zq2=%r8
  2967. mov %rdx,%r8
  2968. # qhasm: squarerax = *(uint64 *)(workp + 144)
  2969. # asm 1: movq 144(<workp=int64#1),>squarerax=int64#7
  2970. # asm 2: movq 144(<workp=%rdi),>squarerax=%rax
  2971. movq 144(%rdi),%rax
  2972. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 136)
  2973. # asm 1: mulq 136(<workp=int64#1)
  2974. # asm 2: mulq 136(<workp=%rdi)
  2975. mulq 136(%rdi)
  2976. # qhasm: zq3 = squarerax
  2977. # asm 1: mov <squarerax=int64#7,>zq3=int64#6
  2978. # asm 2: mov <squarerax=%rax,>zq3=%r9
  2979. mov %rax,%r9
  2980. # qhasm: squarer4 = squarerdx
  2981. # asm 1: mov <squarerdx=int64#3,>squarer4=int64#8
  2982. # asm 2: mov <squarerdx=%rdx,>squarer4=%r10
  2983. mov %rdx,%r10
  2984. # qhasm: squarerax = *(uint64 *)(workp + 152)
  2985. # asm 1: movq 152(<workp=int64#1),>squarerax=int64#7
  2986. # asm 2: movq 152(<workp=%rdi),>squarerax=%rax
  2987. movq 152(%rdi),%rax
  2988. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 144)
  2989. # asm 1: mulq 144(<workp=int64#1)
  2990. # asm 2: mulq 144(<workp=%rdi)
  2991. mulq 144(%rdi)
  2992. # qhasm: squarer5 = squarerax
  2993. # asm 1: mov <squarerax=int64#7,>squarer5=int64#9
  2994. # asm 2: mov <squarerax=%rax,>squarer5=%r11
  2995. mov %rax,%r11
  2996. # qhasm: squarer6 = squarerdx
  2997. # asm 1: mov <squarerdx=int64#3,>squarer6=int64#10
  2998. # asm 2: mov <squarerdx=%rdx,>squarer6=%r12
  2999. mov %rdx,%r12
  3000. # qhasm: squarerax = *(uint64 *)(workp + 144)
  3001. # asm 1: movq 144(<workp=int64#1),>squarerax=int64#7
  3002. # asm 2: movq 144(<workp=%rdi),>squarerax=%rax
  3003. movq 144(%rdi),%rax
  3004. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 128)
  3005. # asm 1: mulq 128(<workp=int64#1)
  3006. # asm 2: mulq 128(<workp=%rdi)
  3007. mulq 128(%rdi)
  3008. # qhasm: carry? zq2 += squarerax
  3009. # asm 1: add <squarerax=int64#7,<zq2=int64#5
  3010. # asm 2: add <squarerax=%rax,<zq2=%r8
  3011. add %rax,%r8
  3012. # qhasm: carry? zq3 += squarerdx + carry
  3013. # asm 1: adc <squarerdx=int64#3,<zq3=int64#6
  3014. # asm 2: adc <squarerdx=%rdx,<zq3=%r9
  3015. adc %rdx,%r9
  3016. # qhasm: squarer4 += 0 + carry
  3017. # asm 1: adc $0,<squarer4=int64#8
  3018. # asm 2: adc $0,<squarer4=%r10
  3019. adc $0,%r10
  3020. # qhasm: squarerax = *(uint64 *)(workp + 152)
  3021. # asm 1: movq 152(<workp=int64#1),>squarerax=int64#7
  3022. # asm 2: movq 152(<workp=%rdi),>squarerax=%rax
  3023. movq 152(%rdi),%rax
  3024. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 136)
  3025. # asm 1: mulq 136(<workp=int64#1)
  3026. # asm 2: mulq 136(<workp=%rdi)
  3027. mulq 136(%rdi)
  3028. # qhasm: carry? squarer4 += squarerax
  3029. # asm 1: add <squarerax=int64#7,<squarer4=int64#8
  3030. # asm 2: add <squarerax=%rax,<squarer4=%r10
  3031. add %rax,%r10
  3032. # qhasm: carry? squarer5 += squarerdx + carry
  3033. # asm 1: adc <squarerdx=int64#3,<squarer5=int64#9
  3034. # asm 2: adc <squarerdx=%rdx,<squarer5=%r11
  3035. adc %rdx,%r11
  3036. # qhasm: squarer6 += 0 + carry
  3037. # asm 1: adc $0,<squarer6=int64#10
  3038. # asm 2: adc $0,<squarer6=%r12
  3039. adc $0,%r12
  3040. # qhasm: squarerax = *(uint64 *)(workp + 152)
  3041. # asm 1: movq 152(<workp=int64#1),>squarerax=int64#7
  3042. # asm 2: movq 152(<workp=%rdi),>squarerax=%rax
  3043. movq 152(%rdi),%rax
  3044. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 128)
  3045. # asm 1: mulq 128(<workp=int64#1)
  3046. # asm 2: mulq 128(<workp=%rdi)
  3047. mulq 128(%rdi)
  3048. # qhasm: carry? zq3 += squarerax
  3049. # asm 1: add <squarerax=int64#7,<zq3=int64#6
  3050. # asm 2: add <squarerax=%rax,<zq3=%r9
  3051. add %rax,%r9
  3052. # qhasm: carry? squarer4 += squarerdx + carry
  3053. # asm 1: adc <squarerdx=int64#3,<squarer4=int64#8
  3054. # asm 2: adc <squarerdx=%rdx,<squarer4=%r10
  3055. adc %rdx,%r10
  3056. # qhasm: carry? squarer5 += 0 + carry
  3057. # asm 1: adc $0,<squarer5=int64#9
  3058. # asm 2: adc $0,<squarer5=%r11
  3059. adc $0,%r11
  3060. # qhasm: carry? squarer6 += 0 + carry
  3061. # asm 1: adc $0,<squarer6=int64#10
  3062. # asm 2: adc $0,<squarer6=%r12
  3063. adc $0,%r12
  3064. # qhasm: squarer7 += 0 + carry
  3065. # asm 1: adc $0,<squarer7=int64#2
  3066. # asm 2: adc $0,<squarer7=%rsi
  3067. adc $0,%rsi
  3068. # qhasm: carry? zq1 += zq1
  3069. # asm 1: add <zq1=int64#4,<zq1=int64#4
  3070. # asm 2: add <zq1=%rcx,<zq1=%rcx
  3071. add %rcx,%rcx
  3072. # qhasm: carry? zq2 += zq2 + carry
  3073. # asm 1: adc <zq2=int64#5,<zq2=int64#5
  3074. # asm 2: adc <zq2=%r8,<zq2=%r8
  3075. adc %r8,%r8
  3076. # qhasm: carry? zq3 += zq3 + carry
  3077. # asm 1: adc <zq3=int64#6,<zq3=int64#6
  3078. # asm 2: adc <zq3=%r9,<zq3=%r9
  3079. adc %r9,%r9
  3080. # qhasm: carry? squarer4 += squarer4 + carry
  3081. # asm 1: adc <squarer4=int64#8,<squarer4=int64#8
  3082. # asm 2: adc <squarer4=%r10,<squarer4=%r10
  3083. adc %r10,%r10
  3084. # qhasm: carry? squarer5 += squarer5 + carry
  3085. # asm 1: adc <squarer5=int64#9,<squarer5=int64#9
  3086. # asm 2: adc <squarer5=%r11,<squarer5=%r11
  3087. adc %r11,%r11
  3088. # qhasm: carry? squarer6 += squarer6 + carry
  3089. # asm 1: adc <squarer6=int64#10,<squarer6=int64#10
  3090. # asm 2: adc <squarer6=%r12,<squarer6=%r12
  3091. adc %r12,%r12
  3092. # qhasm: squarer7 += squarer7 + carry
  3093. # asm 1: adc <squarer7=int64#2,<squarer7=int64#2
  3094. # asm 2: adc <squarer7=%rsi,<squarer7=%rsi
  3095. adc %rsi,%rsi
  3096. # qhasm: squarerax = *(uint64 *)(workp + 128)
  3097. # asm 1: movq 128(<workp=int64#1),>squarerax=int64#7
  3098. # asm 2: movq 128(<workp=%rdi),>squarerax=%rax
  3099. movq 128(%rdi),%rax
  3100. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 128)
  3101. # asm 1: mulq 128(<workp=int64#1)
  3102. # asm 2: mulq 128(<workp=%rdi)
  3103. mulq 128(%rdi)
  3104. # qhasm: zq0 = squarerax
  3105. # asm 1: mov <squarerax=int64#7,>zq0=int64#11
  3106. # asm 2: mov <squarerax=%rax,>zq0=%r13
  3107. mov %rax,%r13
  3108. # qhasm: squaret1 = squarerdx
  3109. # asm 1: mov <squarerdx=int64#3,>squaret1=int64#12
  3110. # asm 2: mov <squarerdx=%rdx,>squaret1=%r14
  3111. mov %rdx,%r14
  3112. # qhasm: squarerax = *(uint64 *)(workp + 136)
  3113. # asm 1: movq 136(<workp=int64#1),>squarerax=int64#7
  3114. # asm 2: movq 136(<workp=%rdi),>squarerax=%rax
  3115. movq 136(%rdi),%rax
  3116. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 136)
  3117. # asm 1: mulq 136(<workp=int64#1)
  3118. # asm 2: mulq 136(<workp=%rdi)
  3119. mulq 136(%rdi)
  3120. # qhasm: squaret2 = squarerax
  3121. # asm 1: mov <squarerax=int64#7,>squaret2=int64#13
  3122. # asm 2: mov <squarerax=%rax,>squaret2=%r15
  3123. mov %rax,%r15
  3124. # qhasm: squaret3 = squarerdx
  3125. # asm 1: mov <squarerdx=int64#3,>squaret3=int64#14
  3126. # asm 2: mov <squarerdx=%rdx,>squaret3=%rbx
  3127. mov %rdx,%rbx
  3128. # qhasm: squarerax = *(uint64 *)(workp + 144)
  3129. # asm 1: movq 144(<workp=int64#1),>squarerax=int64#7
  3130. # asm 2: movq 144(<workp=%rdi),>squarerax=%rax
  3131. movq 144(%rdi),%rax
  3132. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 144)
  3133. # asm 1: mulq 144(<workp=int64#1)
  3134. # asm 2: mulq 144(<workp=%rdi)
  3135. mulq 144(%rdi)
  3136. # qhasm: carry? zq1 += squaret1
  3137. # asm 1: add <squaret1=int64#12,<zq1=int64#4
  3138. # asm 2: add <squaret1=%r14,<zq1=%rcx
  3139. add %r14,%rcx
  3140. # qhasm: carry? zq2 += squaret2 + carry
  3141. # asm 1: adc <squaret2=int64#13,<zq2=int64#5
  3142. # asm 2: adc <squaret2=%r15,<zq2=%r8
  3143. adc %r15,%r8
  3144. # qhasm: carry? zq3 += squaret3 + carry
  3145. # asm 1: adc <squaret3=int64#14,<zq3=int64#6
  3146. # asm 2: adc <squaret3=%rbx,<zq3=%r9
  3147. adc %rbx,%r9
  3148. # qhasm: carry? squarer4 += squarerax + carry
  3149. # asm 1: adc <squarerax=int64#7,<squarer4=int64#8
  3150. # asm 2: adc <squarerax=%rax,<squarer4=%r10
  3151. adc %rax,%r10
  3152. # qhasm: carry? squarer5 += squarerdx + carry
  3153. # asm 1: adc <squarerdx=int64#3,<squarer5=int64#9
  3154. # asm 2: adc <squarerdx=%rdx,<squarer5=%r11
  3155. adc %rdx,%r11
  3156. # qhasm: carry? squarer6 += 0 + carry
  3157. # asm 1: adc $0,<squarer6=int64#10
  3158. # asm 2: adc $0,<squarer6=%r12
  3159. adc $0,%r12
  3160. # qhasm: squarer7 += 0 + carry
  3161. # asm 1: adc $0,<squarer7=int64#2
  3162. # asm 2: adc $0,<squarer7=%rsi
  3163. adc $0,%rsi
  3164. # qhasm: squarerax = *(uint64 *)(workp + 152)
  3165. # asm 1: movq 152(<workp=int64#1),>squarerax=int64#7
  3166. # asm 2: movq 152(<workp=%rdi),>squarerax=%rax
  3167. movq 152(%rdi),%rax
  3168. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 152)
  3169. # asm 1: mulq 152(<workp=int64#1)
  3170. # asm 2: mulq 152(<workp=%rdi)
  3171. mulq 152(%rdi)
  3172. # qhasm: carry? squarer6 += squarerax
  3173. # asm 1: add <squarerax=int64#7,<squarer6=int64#10
  3174. # asm 2: add <squarerax=%rax,<squarer6=%r12
  3175. add %rax,%r12
  3176. # qhasm: squarer7 += squarerdx + carry
  3177. # asm 1: adc <squarerdx=int64#3,<squarer7=int64#2
  3178. # asm 2: adc <squarerdx=%rdx,<squarer7=%rsi
  3179. adc %rdx,%rsi
  3180. # qhasm: squarerax = squarer4
  3181. # asm 1: mov <squarer4=int64#8,>squarerax=int64#7
  3182. # asm 2: mov <squarer4=%r10,>squarerax=%rax
  3183. mov %r10,%rax
  3184. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  3185. mulq crypto_scalarmult_curve25519_amd64_64_38
  3186. # qhasm: squarer4 = squarerax
  3187. # asm 1: mov <squarerax=int64#7,>squarer4=int64#8
  3188. # asm 2: mov <squarerax=%rax,>squarer4=%r10
  3189. mov %rax,%r10
  3190. # qhasm: squarerax = squarer5
  3191. # asm 1: mov <squarer5=int64#9,>squarerax=int64#7
  3192. # asm 2: mov <squarer5=%r11,>squarerax=%rax
  3193. mov %r11,%rax
  3194. # qhasm: squarer5 = squarerdx
  3195. # asm 1: mov <squarerdx=int64#3,>squarer5=int64#9
  3196. # asm 2: mov <squarerdx=%rdx,>squarer5=%r11
  3197. mov %rdx,%r11
  3198. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  3199. mulq crypto_scalarmult_curve25519_amd64_64_38
  3200. # qhasm: carry? squarer5 += squarerax
  3201. # asm 1: add <squarerax=int64#7,<squarer5=int64#9
  3202. # asm 2: add <squarerax=%rax,<squarer5=%r11
  3203. add %rax,%r11
  3204. # qhasm: squarerax = squarer6
  3205. # asm 1: mov <squarer6=int64#10,>squarerax=int64#7
  3206. # asm 2: mov <squarer6=%r12,>squarerax=%rax
  3207. mov %r12,%rax
  3208. # qhasm: squarer6 = 0
  3209. # asm 1: mov $0,>squarer6=int64#10
  3210. # asm 2: mov $0,>squarer6=%r12
  3211. mov $0,%r12
  3212. # qhasm: squarer6 += squarerdx + carry
  3213. # asm 1: adc <squarerdx=int64#3,<squarer6=int64#10
  3214. # asm 2: adc <squarerdx=%rdx,<squarer6=%r12
  3215. adc %rdx,%r12
  3216. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  3217. mulq crypto_scalarmult_curve25519_amd64_64_38
  3218. # qhasm: carry? squarer6 += squarerax
  3219. # asm 1: add <squarerax=int64#7,<squarer6=int64#10
  3220. # asm 2: add <squarerax=%rax,<squarer6=%r12
  3221. add %rax,%r12
  3222. # qhasm: squarerax = squarer7
  3223. # asm 1: mov <squarer7=int64#2,>squarerax=int64#7
  3224. # asm 2: mov <squarer7=%rsi,>squarerax=%rax
  3225. mov %rsi,%rax
  3226. # qhasm: squarer7 = 0
  3227. # asm 1: mov $0,>squarer7=int64#2
  3228. # asm 2: mov $0,>squarer7=%rsi
  3229. mov $0,%rsi
  3230. # qhasm: squarer7 += squarerdx + carry
  3231. # asm 1: adc <squarerdx=int64#3,<squarer7=int64#2
  3232. # asm 2: adc <squarerdx=%rdx,<squarer7=%rsi
  3233. adc %rdx,%rsi
  3234. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  3235. mulq crypto_scalarmult_curve25519_amd64_64_38
  3236. # qhasm: carry? squarer7 += squarerax
  3237. # asm 1: add <squarerax=int64#7,<squarer7=int64#2
  3238. # asm 2: add <squarerax=%rax,<squarer7=%rsi
  3239. add %rax,%rsi
  3240. # qhasm: squarer8 = 0
  3241. # asm 1: mov $0,>squarer8=int64#7
  3242. # asm 2: mov $0,>squarer8=%rax
  3243. mov $0,%rax
  3244. # qhasm: squarer8 += squarerdx + carry
  3245. # asm 1: adc <squarerdx=int64#3,<squarer8=int64#7
  3246. # asm 2: adc <squarerdx=%rdx,<squarer8=%rax
  3247. adc %rdx,%rax
  3248. # qhasm: carry? zq0 += squarer4
  3249. # asm 1: add <squarer4=int64#8,<zq0=int64#11
  3250. # asm 2: add <squarer4=%r10,<zq0=%r13
  3251. add %r10,%r13
  3252. # qhasm: carry? zq1 += squarer5 + carry
  3253. # asm 1: adc <squarer5=int64#9,<zq1=int64#4
  3254. # asm 2: adc <squarer5=%r11,<zq1=%rcx
  3255. adc %r11,%rcx
  3256. # qhasm: carry? zq2 += squarer6 + carry
  3257. # asm 1: adc <squarer6=int64#10,<zq2=int64#5
  3258. # asm 2: adc <squarer6=%r12,<zq2=%r8
  3259. adc %r12,%r8
  3260. # qhasm: carry? zq3 += squarer7 + carry
  3261. # asm 1: adc <squarer7=int64#2,<zq3=int64#6
  3262. # asm 2: adc <squarer7=%rsi,<zq3=%r9
  3263. adc %rsi,%r9
  3264. # qhasm: squarezero = 0
  3265. # asm 1: mov $0,>squarezero=int64#2
  3266. # asm 2: mov $0,>squarezero=%rsi
  3267. mov $0,%rsi
  3268. # qhasm: squarer8 += squarezero + carry
  3269. # asm 1: adc <squarezero=int64#2,<squarer8=int64#7
  3270. # asm 2: adc <squarezero=%rsi,<squarer8=%rax
  3271. adc %rsi,%rax
  3272. # qhasm: squarer8 *= 38
  3273. # asm 1: imulq $38,<squarer8=int64#7,>squarer8=int64#3
  3274. # asm 2: imulq $38,<squarer8=%rax,>squarer8=%rdx
  3275. imulq $38,%rax,%rdx
  3276. # qhasm: carry? zq0 += squarer8
  3277. # asm 1: add <squarer8=int64#3,<zq0=int64#11
  3278. # asm 2: add <squarer8=%rdx,<zq0=%r13
  3279. add %rdx,%r13
  3280. # qhasm: carry? zq1 += squarezero + carry
  3281. # asm 1: adc <squarezero=int64#2,<zq1=int64#4
  3282. # asm 2: adc <squarezero=%rsi,<zq1=%rcx
  3283. adc %rsi,%rcx
  3284. # qhasm: carry? zq2 += squarezero + carry
  3285. # asm 1: adc <squarezero=int64#2,<zq2=int64#5
  3286. # asm 2: adc <squarezero=%rsi,<zq2=%r8
  3287. adc %rsi,%r8
  3288. # qhasm: carry? zq3 += squarezero + carry
  3289. # asm 1: adc <squarezero=int64#2,<zq3=int64#6
  3290. # asm 2: adc <squarezero=%rsi,<zq3=%r9
  3291. adc %rsi,%r9
  3292. # qhasm: squarezero += squarezero + carry
  3293. # asm 1: adc <squarezero=int64#2,<squarezero=int64#2
  3294. # asm 2: adc <squarezero=%rsi,<squarezero=%rsi
  3295. adc %rsi,%rsi
  3296. # qhasm: squarezero *= 38
  3297. # asm 1: imulq $38,<squarezero=int64#2,>squarezero=int64#2
  3298. # asm 2: imulq $38,<squarezero=%rsi,>squarezero=%rsi
  3299. imulq $38,%rsi,%rsi
  3300. # qhasm: zq0 += squarezero
  3301. # asm 1: add <squarezero=int64#2,<zq0=int64#11
  3302. # asm 2: add <squarezero=%rsi,<zq0=%r13
  3303. add %rsi,%r13
  3304. # qhasm: *(uint64 *)(workp + 128) = zq0
  3305. # asm 1: movq <zq0=int64#11,128(<workp=int64#1)
  3306. # asm 2: movq <zq0=%r13,128(<workp=%rdi)
  3307. movq %r13,128(%rdi)
  3308. # qhasm: *(uint64 *)(workp + 136) = zq1
  3309. # asm 1: movq <zq1=int64#4,136(<workp=int64#1)
  3310. # asm 2: movq <zq1=%rcx,136(<workp=%rdi)
  3311. movq %rcx,136(%rdi)
  3312. # qhasm: *(uint64 *)(workp + 144) = zq2
  3313. # asm 1: movq <zq2=int64#5,144(<workp=int64#1)
  3314. # asm 2: movq <zq2=%r8,144(<workp=%rdi)
  3315. movq %r8,144(%rdi)
  3316. # qhasm: *(uint64 *)(workp + 152) = zq3
  3317. # asm 1: movq <zq3=int64#6,152(<workp=int64#1)
  3318. # asm 2: movq <zq3=%r9,152(<workp=%rdi)
  3319. movq %r9,152(%rdi)
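# note: the squared difference at workp+128..152 is now multiplied by the
# four limbs at workp+0..24 (in this ladder step that operand is presumably
# the stored x-coordinate of the input point).  This is the same 4x4
# schoolbook multiplication pattern as above: each limb mulx0..mulx3 is
# multiplied against all four limbs of the other operand, with mulc carrying
# between columns, before the usual fold-by-38 reduction.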
  3320. # qhasm: mulr4 = 0
  3321. # asm 1: mov $0,>mulr4=int64#2
  3322. # asm 2: mov $0,>mulr4=%rsi
  3323. mov $0,%rsi
  3324. # qhasm: mulr5 = 0
  3325. # asm 1: mov $0,>mulr5=int64#4
  3326. # asm 2: mov $0,>mulr5=%rcx
  3327. mov $0,%rcx
  3328. # qhasm: mulr6 = 0
  3329. # asm 1: mov $0,>mulr6=int64#5
  3330. # asm 2: mov $0,>mulr6=%r8
  3331. mov $0,%r8
  3332. # qhasm: mulr7 = 0
  3333. # asm 1: mov $0,>mulr7=int64#6
  3334. # asm 2: mov $0,>mulr7=%r9
  3335. mov $0,%r9
  3336. # qhasm: mulx0 = *(uint64 *)(workp + 128)
  3337. # asm 1: movq 128(<workp=int64#1),>mulx0=int64#8
  3338. # asm 2: movq 128(<workp=%rdi),>mulx0=%r10
  3339. movq 128(%rdi),%r10
  3340. # qhasm: mulrax = *(uint64 *)(workp + 0)
  3341. # asm 1: movq 0(<workp=int64#1),>mulrax=int64#7
  3342. # asm 2: movq 0(<workp=%rdi),>mulrax=%rax
  3343. movq 0(%rdi),%rax
  3344. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  3345. # asm 1: mul <mulx0=int64#8
  3346. # asm 2: mul <mulx0=%r10
  3347. mul %r10
  3348. # qhasm: zq0 = mulrax
  3349. # asm 1: mov <mulrax=int64#7,>zq0=int64#9
  3350. # asm 2: mov <mulrax=%rax,>zq0=%r11
  3351. mov %rax,%r11
  3352. # qhasm: zq1 = mulrdx
  3353. # asm 1: mov <mulrdx=int64#3,>zq1=int64#10
  3354. # asm 2: mov <mulrdx=%rdx,>zq1=%r12
  3355. mov %rdx,%r12
  3356. # qhasm: mulrax = *(uint64 *)(workp + 8)
  3357. # asm 1: movq 8(<workp=int64#1),>mulrax=int64#7
  3358. # asm 2: movq 8(<workp=%rdi),>mulrax=%rax
  3359. movq 8(%rdi),%rax
  3360. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  3361. # asm 1: mul <mulx0=int64#8
  3362. # asm 2: mul <mulx0=%r10
  3363. mul %r10
  3364. # qhasm: carry? zq1 += mulrax
  3365. # asm 1: add <mulrax=int64#7,<zq1=int64#10
  3366. # asm 2: add <mulrax=%rax,<zq1=%r12
  3367. add %rax,%r12
  3368. # qhasm: zq2 = 0
  3369. # asm 1: mov $0,>zq2=int64#11
  3370. # asm 2: mov $0,>zq2=%r13
  3371. mov $0,%r13
  3372. # qhasm: zq2 += mulrdx + carry
  3373. # asm 1: adc <mulrdx=int64#3,<zq2=int64#11
  3374. # asm 2: adc <mulrdx=%rdx,<zq2=%r13
  3375. adc %rdx,%r13
  3376. # qhasm: mulrax = *(uint64 *)(workp + 16)
  3377. # asm 1: movq 16(<workp=int64#1),>mulrax=int64#7
  3378. # asm 2: movq 16(<workp=%rdi),>mulrax=%rax
  3379. movq 16(%rdi),%rax
  3380. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  3381. # asm 1: mul <mulx0=int64#8
  3382. # asm 2: mul <mulx0=%r10
  3383. mul %r10
  3384. # qhasm: carry? zq2 += mulrax
  3385. # asm 1: add <mulrax=int64#7,<zq2=int64#11
  3386. # asm 2: add <mulrax=%rax,<zq2=%r13
  3387. add %rax,%r13
  3388. # qhasm: zq3 = 0
  3389. # asm 1: mov $0,>zq3=int64#12
  3390. # asm 2: mov $0,>zq3=%r14
  3391. mov $0,%r14
  3392. # qhasm: zq3 += mulrdx + carry
  3393. # asm 1: adc <mulrdx=int64#3,<zq3=int64#12
  3394. # asm 2: adc <mulrdx=%rdx,<zq3=%r14
  3395. adc %rdx,%r14
  3396. # qhasm: mulrax = *(uint64 *)(workp + 24)
  3397. # asm 1: movq 24(<workp=int64#1),>mulrax=int64#7
  3398. # asm 2: movq 24(<workp=%rdi),>mulrax=%rax
  3399. movq 24(%rdi),%rax
  3400. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  3401. # asm 1: mul <mulx0=int64#8
  3402. # asm 2: mul <mulx0=%r10
  3403. mul %r10
  3404. # qhasm: carry? zq3 += mulrax
  3405. # asm 1: add <mulrax=int64#7,<zq3=int64#12
  3406. # asm 2: add <mulrax=%rax,<zq3=%r14
  3407. add %rax,%r14
  3408. # qhasm: mulr4 += mulrdx + carry
  3409. # asm 1: adc <mulrdx=int64#3,<mulr4=int64#2
  3410. # asm 2: adc <mulrdx=%rdx,<mulr4=%rsi
  3411. adc %rdx,%rsi
  3412. # qhasm: mulx1 = *(uint64 *)(workp + 136)
  3413. # asm 1: movq 136(<workp=int64#1),>mulx1=int64#8
  3414. # asm 2: movq 136(<workp=%rdi),>mulx1=%r10
  3415. movq 136(%rdi),%r10
  3416. # qhasm: mulrax = *(uint64 *)(workp + 0)
  3417. # asm 1: movq 0(<workp=int64#1),>mulrax=int64#7
  3418. # asm 2: movq 0(<workp=%rdi),>mulrax=%rax
  3419. movq 0(%rdi),%rax
  3420. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  3421. # asm 1: mul <mulx1=int64#8
  3422. # asm 2: mul <mulx1=%r10
  3423. mul %r10
  3424. # qhasm: carry? zq1 += mulrax
  3425. # asm 1: add <mulrax=int64#7,<zq1=int64#10
  3426. # asm 2: add <mulrax=%rax,<zq1=%r12
  3427. add %rax,%r12
  3428. # qhasm: mulc = 0
  3429. # asm 1: mov $0,>mulc=int64#13
  3430. # asm 2: mov $0,>mulc=%r15
  3431. mov $0,%r15
  3432. # qhasm: mulc += mulrdx + carry
  3433. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  3434. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  3435. adc %rdx,%r15
  3436. # qhasm: mulrax = *(uint64 *)(workp + 8)
  3437. # asm 1: movq 8(<workp=int64#1),>mulrax=int64#7
  3438. # asm 2: movq 8(<workp=%rdi),>mulrax=%rax
  3439. movq 8(%rdi),%rax
  3440. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  3441. # asm 1: mul <mulx1=int64#8
  3442. # asm 2: mul <mulx1=%r10
  3443. mul %r10
  3444. # qhasm: carry? zq2 += mulrax
  3445. # asm 1: add <mulrax=int64#7,<zq2=int64#11
  3446. # asm 2: add <mulrax=%rax,<zq2=%r13
  3447. add %rax,%r13
  3448. # qhasm: mulrdx += 0 + carry
  3449. # asm 1: adc $0,<mulrdx=int64#3
  3450. # asm 2: adc $0,<mulrdx=%rdx
  3451. adc $0,%rdx
  3452. # qhasm: carry? zq2 += mulc
  3453. # asm 1: add <mulc=int64#13,<zq2=int64#11
  3454. # asm 2: add <mulc=%r15,<zq2=%r13
  3455. add %r15,%r13
  3456. # qhasm: mulc = 0
  3457. # asm 1: mov $0,>mulc=int64#13
  3458. # asm 2: mov $0,>mulc=%r15
  3459. mov $0,%r15
  3460. # qhasm: mulc += mulrdx + carry
  3461. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  3462. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  3463. adc %rdx,%r15
  3464. # qhasm: mulrax = *(uint64 *)(workp + 16)
  3465. # asm 1: movq 16(<workp=int64#1),>mulrax=int64#7
  3466. # asm 2: movq 16(<workp=%rdi),>mulrax=%rax
  3467. movq 16(%rdi),%rax
  3468. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  3469. # asm 1: mul <mulx1=int64#8
  3470. # asm 2: mul <mulx1=%r10
  3471. mul %r10
  3472. # qhasm: carry? zq3 += mulrax
  3473. # asm 1: add <mulrax=int64#7,<zq3=int64#12
  3474. # asm 2: add <mulrax=%rax,<zq3=%r14
  3475. add %rax,%r14
  3476. # qhasm: mulrdx += 0 + carry
  3477. # asm 1: adc $0,<mulrdx=int64#3
  3478. # asm 2: adc $0,<mulrdx=%rdx
  3479. adc $0,%rdx
  3480. # qhasm: carry? zq3 += mulc
  3481. # asm 1: add <mulc=int64#13,<zq3=int64#12
  3482. # asm 2: add <mulc=%r15,<zq3=%r14
  3483. add %r15,%r14
  3484. # qhasm: mulc = 0
  3485. # asm 1: mov $0,>mulc=int64#13
  3486. # asm 2: mov $0,>mulc=%r15
  3487. mov $0,%r15
  3488. # qhasm: mulc += mulrdx + carry
  3489. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  3490. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  3491. adc %rdx,%r15
  3492. # qhasm: mulrax = *(uint64 *)(workp + 24)
  3493. # asm 1: movq 24(<workp=int64#1),>mulrax=int64#7
  3494. # asm 2: movq 24(<workp=%rdi),>mulrax=%rax
  3495. movq 24(%rdi),%rax
  3496. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  3497. # asm 1: mul <mulx1=int64#8
  3498. # asm 2: mul <mulx1=%r10
  3499. mul %r10
  3500. # qhasm: carry? mulr4 += mulrax
  3501. # asm 1: add <mulrax=int64#7,<mulr4=int64#2
  3502. # asm 2: add <mulrax=%rax,<mulr4=%rsi
  3503. add %rax,%rsi
  3504. # qhasm: mulrdx += 0 + carry
  3505. # asm 1: adc $0,<mulrdx=int64#3
  3506. # asm 2: adc $0,<mulrdx=%rdx
  3507. adc $0,%rdx
  3508. # qhasm: carry? mulr4 += mulc
  3509. # asm 1: add <mulc=int64#13,<mulr4=int64#2
  3510. # asm 2: add <mulc=%r15,<mulr4=%rsi
  3511. add %r15,%rsi
  3512. # qhasm: mulr5 += mulrdx + carry
  3513. # asm 1: adc <mulrdx=int64#3,<mulr5=int64#4
  3514. # asm 2: adc <mulrdx=%rdx,<mulr5=%rcx
  3515. adc %rdx,%rcx
  3516. # qhasm: mulx2 = *(uint64 *)(workp + 144)
  3517. # asm 1: movq 144(<workp=int64#1),>mulx2=int64#8
  3518. # asm 2: movq 144(<workp=%rdi),>mulx2=%r10
  3519. movq 144(%rdi),%r10
  3520. # qhasm: mulrax = *(uint64 *)(workp + 0)
  3521. # asm 1: movq 0(<workp=int64#1),>mulrax=int64#7
  3522. # asm 2: movq 0(<workp=%rdi),>mulrax=%rax
  3523. movq 0(%rdi),%rax
  3524. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  3525. # asm 1: mul <mulx2=int64#8
  3526. # asm 2: mul <mulx2=%r10
  3527. mul %r10
  3528. # qhasm: carry? zq2 += mulrax
  3529. # asm 1: add <mulrax=int64#7,<zq2=int64#11
  3530. # asm 2: add <mulrax=%rax,<zq2=%r13
  3531. add %rax,%r13
  3532. # qhasm: mulc = 0
  3533. # asm 1: mov $0,>mulc=int64#13
  3534. # asm 2: mov $0,>mulc=%r15
  3535. mov $0,%r15
  3536. # qhasm: mulc += mulrdx + carry
  3537. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  3538. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  3539. adc %rdx,%r15
  3540. # qhasm: mulrax = *(uint64 *)(workp + 8)
  3541. # asm 1: movq 8(<workp=int64#1),>mulrax=int64#7
  3542. # asm 2: movq 8(<workp=%rdi),>mulrax=%rax
  3543. movq 8(%rdi),%rax
  3544. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  3545. # asm 1: mul <mulx2=int64#8
  3546. # asm 2: mul <mulx2=%r10
  3547. mul %r10
  3548. # qhasm: carry? zq3 += mulrax
  3549. # asm 1: add <mulrax=int64#7,<zq3=int64#12
  3550. # asm 2: add <mulrax=%rax,<zq3=%r14
  3551. add %rax,%r14
  3552. # qhasm: mulrdx += 0 + carry
  3553. # asm 1: adc $0,<mulrdx=int64#3
  3554. # asm 2: adc $0,<mulrdx=%rdx
  3555. adc $0,%rdx
  3556. # qhasm: carry? zq3 += mulc
  3557. # asm 1: add <mulc=int64#13,<zq3=int64#12
  3558. # asm 2: add <mulc=%r15,<zq3=%r14
  3559. add %r15,%r14
  3560. # qhasm: mulc = 0
  3561. # asm 1: mov $0,>mulc=int64#13
  3562. # asm 2: mov $0,>mulc=%r15
  3563. mov $0,%r15
  3564. # qhasm: mulc += mulrdx + carry
  3565. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  3566. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  3567. adc %rdx,%r15
  3568. # qhasm: mulrax = *(uint64 *)(workp + 16)
  3569. # asm 1: movq 16(<workp=int64#1),>mulrax=int64#7
  3570. # asm 2: movq 16(<workp=%rdi),>mulrax=%rax
  3571. movq 16(%rdi),%rax
  3572. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  3573. # asm 1: mul <mulx2=int64#8
  3574. # asm 2: mul <mulx2=%r10
  3575. mul %r10
  3576. # qhasm: carry? mulr4 += mulrax
  3577. # asm 1: add <mulrax=int64#7,<mulr4=int64#2
  3578. # asm 2: add <mulrax=%rax,<mulr4=%rsi
  3579. add %rax,%rsi
  3580. # qhasm: mulrdx += 0 + carry
  3581. # asm 1: adc $0,<mulrdx=int64#3
  3582. # asm 2: adc $0,<mulrdx=%rdx
  3583. adc $0,%rdx
  3584. # qhasm: carry? mulr4 += mulc
  3585. # asm 1: add <mulc=int64#13,<mulr4=int64#2
  3586. # asm 2: add <mulc=%r15,<mulr4=%rsi
  3587. add %r15,%rsi
  3588. # qhasm: mulc = 0
  3589. # asm 1: mov $0,>mulc=int64#13
  3590. # asm 2: mov $0,>mulc=%r15
  3591. mov $0,%r15
  3592. # qhasm: mulc += mulrdx + carry
  3593. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  3594. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  3595. adc %rdx,%r15
  3596. # qhasm: mulrax = *(uint64 *)(workp + 24)
  3597. # asm 1: movq 24(<workp=int64#1),>mulrax=int64#7
  3598. # asm 2: movq 24(<workp=%rdi),>mulrax=%rax
  3599. movq 24(%rdi),%rax
  3600. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  3601. # asm 1: mul <mulx2=int64#8
  3602. # asm 2: mul <mulx2=%r10
  3603. mul %r10
  3604. # qhasm: carry? mulr5 += mulrax
  3605. # asm 1: add <mulrax=int64#7,<mulr5=int64#4
  3606. # asm 2: add <mulrax=%rax,<mulr5=%rcx
  3607. add %rax,%rcx
  3608. # qhasm: mulrdx += 0 + carry
  3609. # asm 1: adc $0,<mulrdx=int64#3
  3610. # asm 2: adc $0,<mulrdx=%rdx
  3611. adc $0,%rdx
  3612. # qhasm: carry? mulr5 += mulc
  3613. # asm 1: add <mulc=int64#13,<mulr5=int64#4
  3614. # asm 2: add <mulc=%r15,<mulr5=%rcx
  3615. add %r15,%rcx
  3616. # qhasm: mulr6 += mulrdx + carry
  3617. # asm 1: adc <mulrdx=int64#3,<mulr6=int64#5
  3618. # asm 2: adc <mulrdx=%rdx,<mulr6=%r8
  3619. adc %rdx,%r8
  3620. # qhasm: mulx3 = *(uint64 *)(workp + 152)
  3621. # asm 1: movq 152(<workp=int64#1),>mulx3=int64#8
  3622. # asm 2: movq 152(<workp=%rdi),>mulx3=%r10
  3623. movq 152(%rdi),%r10
  3624. # qhasm: mulrax = *(uint64 *)(workp + 0)
  3625. # asm 1: movq 0(<workp=int64#1),>mulrax=int64#7
  3626. # asm 2: movq 0(<workp=%rdi),>mulrax=%rax
  3627. movq 0(%rdi),%rax
  3628. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  3629. # asm 1: mul <mulx3=int64#8
  3630. # asm 2: mul <mulx3=%r10
  3631. mul %r10
  3632. # qhasm: carry? zq3 += mulrax
  3633. # asm 1: add <mulrax=int64#7,<zq3=int64#12
  3634. # asm 2: add <mulrax=%rax,<zq3=%r14
  3635. add %rax,%r14
  3636. # qhasm: mulc = 0
  3637. # asm 1: mov $0,>mulc=int64#13
  3638. # asm 2: mov $0,>mulc=%r15
  3639. mov $0,%r15
  3640. # qhasm: mulc += mulrdx + carry
  3641. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  3642. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  3643. adc %rdx,%r15
  3644. # qhasm: mulrax = *(uint64 *)(workp + 8)
  3645. # asm 1: movq 8(<workp=int64#1),>mulrax=int64#7
  3646. # asm 2: movq 8(<workp=%rdi),>mulrax=%rax
  3647. movq 8(%rdi),%rax
  3648. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  3649. # asm 1: mul <mulx3=int64#8
  3650. # asm 2: mul <mulx3=%r10
  3651. mul %r10
  3652. # qhasm: carry? mulr4 += mulrax
  3653. # asm 1: add <mulrax=int64#7,<mulr4=int64#2
  3654. # asm 2: add <mulrax=%rax,<mulr4=%rsi
  3655. add %rax,%rsi
  3656. # qhasm: mulrdx += 0 + carry
  3657. # asm 1: adc $0,<mulrdx=int64#3
  3658. # asm 2: adc $0,<mulrdx=%rdx
  3659. adc $0,%rdx
  3660. # qhasm: carry? mulr4 += mulc
  3661. # asm 1: add <mulc=int64#13,<mulr4=int64#2
  3662. # asm 2: add <mulc=%r15,<mulr4=%rsi
  3663. add %r15,%rsi
  3664. # qhasm: mulc = 0
  3665. # asm 1: mov $0,>mulc=int64#13
  3666. # asm 2: mov $0,>mulc=%r15
  3667. mov $0,%r15
  3668. # qhasm: mulc += mulrdx + carry
  3669. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  3670. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  3671. adc %rdx,%r15
  3672. # qhasm: mulrax = *(uint64 *)(workp + 16)
  3673. # asm 1: movq 16(<workp=int64#1),>mulrax=int64#7
  3674. # asm 2: movq 16(<workp=%rdi),>mulrax=%rax
  3675. movq 16(%rdi),%rax
  3676. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  3677. # asm 1: mul <mulx3=int64#8
  3678. # asm 2: mul <mulx3=%r10
  3679. mul %r10
  3680. # qhasm: carry? mulr5 += mulrax
  3681. # asm 1: add <mulrax=int64#7,<mulr5=int64#4
  3682. # asm 2: add <mulrax=%rax,<mulr5=%rcx
  3683. add %rax,%rcx
  3684. # qhasm: mulrdx += 0 + carry
  3685. # asm 1: adc $0,<mulrdx=int64#3
  3686. # asm 2: adc $0,<mulrdx=%rdx
  3687. adc $0,%rdx
  3688. # qhasm: carry? mulr5 += mulc
  3689. # asm 1: add <mulc=int64#13,<mulr5=int64#4
  3690. # asm 2: add <mulc=%r15,<mulr5=%rcx
  3691. add %r15,%rcx
  3692. # qhasm: mulc = 0
  3693. # asm 1: mov $0,>mulc=int64#13
  3694. # asm 2: mov $0,>mulc=%r15
  3695. mov $0,%r15
  3696. # qhasm: mulc += mulrdx + carry
  3697. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  3698. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  3699. adc %rdx,%r15
  3700. # qhasm: mulrax = *(uint64 *)(workp + 24)
  3701. # asm 1: movq 24(<workp=int64#1),>mulrax=int64#7
  3702. # asm 2: movq 24(<workp=%rdi),>mulrax=%rax
  3703. movq 24(%rdi),%rax
  3704. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  3705. # asm 1: mul <mulx3=int64#8
  3706. # asm 2: mul <mulx3=%r10
  3707. mul %r10
  3708. # qhasm: carry? mulr6 += mulrax
  3709. # asm 1: add <mulrax=int64#7,<mulr6=int64#5
  3710. # asm 2: add <mulrax=%rax,<mulr6=%r8
  3711. add %rax,%r8
  3712. # qhasm: mulrdx += 0 + carry
  3713. # asm 1: adc $0,<mulrdx=int64#3
  3714. # asm 2: adc $0,<mulrdx=%rdx
  3715. adc $0,%rdx
  3716. # qhasm: carry? mulr6 += mulc
  3717. # asm 1: add <mulc=int64#13,<mulr6=int64#5
  3718. # asm 2: add <mulc=%r15,<mulr6=%r8
  3719. add %r15,%r8
  3720. # qhasm: mulr7 += mulrdx + carry
  3721. # asm 1: adc <mulrdx=int64#3,<mulr7=int64#6
  3722. # asm 2: adc <mulrdx=%rdx,<mulr7=%r9
  3723. adc %rdx,%r9
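# all four rows are done: the low 256 bits of the product sit in zq0..zq3,
# the high 256 bits in mulr4..mulr7; the block below reduces modulo
# 2^255-19 by scaling the high limbs by 38 (2^256 = 38 mod 2^255-19) and
# folding them into the low limbs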
  3724. # qhasm: mulrax = mulr4
  3725. # asm 1: mov <mulr4=int64#2,>mulrax=int64#7
  3726. # asm 2: mov <mulr4=%rsi,>mulrax=%rax
  3727. mov %rsi,%rax
  3728. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  3729. mulq crypto_scalarmult_curve25519_amd64_64_38
  3730. # qhasm: mulr4 = mulrax
  3731. # asm 1: mov <mulrax=int64#7,>mulr4=int64#2
  3732. # asm 2: mov <mulrax=%rax,>mulr4=%rsi
  3733. mov %rax,%rsi
  3734. # qhasm: mulrax = mulr5
  3735. # asm 1: mov <mulr5=int64#4,>mulrax=int64#7
  3736. # asm 2: mov <mulr5=%rcx,>mulrax=%rax
  3737. mov %rcx,%rax
  3738. # qhasm: mulr5 = mulrdx
  3739. # asm 1: mov <mulrdx=int64#3,>mulr5=int64#4
  3740. # asm 2: mov <mulrdx=%rdx,>mulr5=%rcx
  3741. mov %rdx,%rcx
  3742. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  3743. mulq crypto_scalarmult_curve25519_amd64_64_38
  3744. # qhasm: carry? mulr5 += mulrax
  3745. # asm 1: add <mulrax=int64#7,<mulr5=int64#4
  3746. # asm 2: add <mulrax=%rax,<mulr5=%rcx
  3747. add %rax,%rcx
  3748. # qhasm: mulrax = mulr6
  3749. # asm 1: mov <mulr6=int64#5,>mulrax=int64#7
  3750. # asm 2: mov <mulr6=%r8,>mulrax=%rax
  3751. mov %r8,%rax
  3752. # qhasm: mulr6 = 0
  3753. # asm 1: mov $0,>mulr6=int64#5
  3754. # asm 2: mov $0,>mulr6=%r8
  3755. mov $0,%r8
  3756. # qhasm: mulr6 += mulrdx + carry
  3757. # asm 1: adc <mulrdx=int64#3,<mulr6=int64#5
  3758. # asm 2: adc <mulrdx=%rdx,<mulr6=%r8
  3759. adc %rdx,%r8
  3760. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  3761. mulq crypto_scalarmult_curve25519_amd64_64_38
  3762. # qhasm: carry? mulr6 += mulrax
  3763. # asm 1: add <mulrax=int64#7,<mulr6=int64#5
  3764. # asm 2: add <mulrax=%rax,<mulr6=%r8
  3765. add %rax,%r8
  3766. # qhasm: mulrax = mulr7
  3767. # asm 1: mov <mulr7=int64#6,>mulrax=int64#7
  3768. # asm 2: mov <mulr7=%r9,>mulrax=%rax
  3769. mov %r9,%rax
  3770. # qhasm: mulr7 = 0
  3771. # asm 1: mov $0,>mulr7=int64#6
  3772. # asm 2: mov $0,>mulr7=%r9
  3773. mov $0,%r9
  3774. # qhasm: mulr7 += mulrdx + carry
  3775. # asm 1: adc <mulrdx=int64#3,<mulr7=int64#6
  3776. # asm 2: adc <mulrdx=%rdx,<mulr7=%r9
  3777. adc %rdx,%r9
  3778. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  3779. mulq crypto_scalarmult_curve25519_amd64_64_38
  3780. # qhasm: carry? mulr7 += mulrax
  3781. # asm 1: add <mulrax=int64#7,<mulr7=int64#6
  3782. # asm 2: add <mulrax=%rax,<mulr7=%r9
  3783. add %rax,%r9
  3784. # qhasm: mulr8 = 0
  3785. # asm 1: mov $0,>mulr8=int64#7
  3786. # asm 2: mov $0,>mulr8=%rax
  3787. mov $0,%rax
  3788. # qhasm: mulr8 += mulrdx + carry
  3789. # asm 1: adc <mulrdx=int64#3,<mulr8=int64#7
  3790. # asm 2: adc <mulrdx=%rdx,<mulr8=%rax
  3791. adc %rdx,%rax
  3792. # qhasm: carry? zq0 += mulr4
  3793. # asm 1: add <mulr4=int64#2,<zq0=int64#9
  3794. # asm 2: add <mulr4=%rsi,<zq0=%r11
  3795. add %rsi,%r11
  3796. # qhasm: carry? zq1 += mulr5 + carry
  3797. # asm 1: adc <mulr5=int64#4,<zq1=int64#10
  3798. # asm 2: adc <mulr5=%rcx,<zq1=%r12
  3799. adc %rcx,%r12
  3800. # qhasm: carry? zq2 += mulr6 + carry
  3801. # asm 1: adc <mulr6=int64#5,<zq2=int64#11
  3802. # asm 2: adc <mulr6=%r8,<zq2=%r13
  3803. adc %r8,%r13
  3804. # qhasm: carry? zq3 += mulr7 + carry
  3805. # asm 1: adc <mulr7=int64#6,<zq3=int64#12
  3806. # asm 2: adc <mulr7=%r9,<zq3=%r14
  3807. adc %r9,%r14
  3808. # qhasm: mulzero = 0
  3809. # asm 1: mov $0,>mulzero=int64#2
  3810. # asm 2: mov $0,>mulzero=%rsi
  3811. mov $0,%rsi
  3812. # qhasm: mulr8 += mulzero + carry
  3813. # asm 1: adc <mulzero=int64#2,<mulr8=int64#7
  3814. # asm 2: adc <mulzero=%rsi,<mulr8=%rax
  3815. adc %rsi,%rax
  3816. # qhasm: mulr8 *= 38
  3817. # asm 1: imulq $38,<mulr8=int64#7,>mulr8=int64#3
  3818. # asm 2: imulq $38,<mulr8=%rax,>mulr8=%rdx
  3819. imulq $38,%rax,%rdx
  3820. # qhasm: carry? zq0 += mulr8
  3821. # asm 1: add <mulr8=int64#3,<zq0=int64#9
  3822. # asm 2: add <mulr8=%rdx,<zq0=%r11
  3823. add %rdx,%r11
  3824. # qhasm: carry? zq1 += mulzero + carry
  3825. # asm 1: adc <mulzero=int64#2,<zq1=int64#10
  3826. # asm 2: adc <mulzero=%rsi,<zq1=%r12
  3827. adc %rsi,%r12
  3828. # qhasm: carry? zq2 += mulzero + carry
  3829. # asm 1: adc <mulzero=int64#2,<zq2=int64#11
  3830. # asm 2: adc <mulzero=%rsi,<zq2=%r13
  3831. adc %rsi,%r13
  3832. # qhasm: carry? zq3 += mulzero + carry
  3833. # asm 1: adc <mulzero=int64#2,<zq3=int64#12
  3834. # asm 2: adc <mulzero=%rsi,<zq3=%r14
  3835. adc %rsi,%r14
  3836. # qhasm: mulzero += mulzero + carry
  3837. # asm 1: adc <mulzero=int64#2,<mulzero=int64#2
  3838. # asm 2: adc <mulzero=%rsi,<mulzero=%rsi
  3839. adc %rsi,%rsi
  3840. # qhasm: mulzero *= 38
  3841. # asm 1: imulq $38,<mulzero=int64#2,>mulzero=int64#2
  3842. # asm 2: imulq $38,<mulzero=%rsi,>mulzero=%rsi
  3843. imulq $38,%rsi,%rsi
  3844. # qhasm: zq0 += mulzero
  3845. # asm 1: add <mulzero=int64#2,<zq0=int64#9
  3846. # asm 2: add <mulzero=%rsi,<zq0=%r11
  3847. add %rsi,%r11
  3848. # qhasm: *(uint64 *)(workp + 128) = zq0
  3849. # asm 1: movq <zq0=int64#9,128(<workp=int64#1)
  3850. # asm 2: movq <zq0=%r11,128(<workp=%rdi)
  3851. movq %r11,128(%rdi)
  3852. # qhasm: *(uint64 *)(workp + 136) = zq1
  3853. # asm 1: movq <zq1=int64#10,136(<workp=int64#1)
  3854. # asm 2: movq <zq1=%r12,136(<workp=%rdi)
  3855. movq %r12,136(%rdi)
  3856. # qhasm: *(uint64 *)(workp + 144) = zq2
  3857. # asm 1: movq <zq2=int64#11,144(<workp=int64#1)
  3858. # asm 2: movq <zq2=%r13,144(<workp=%rdi)
  3859. movq %r13,144(%rdi)
  3860. # qhasm: *(uint64 *)(workp + 152) = zq3
  3861. # asm 1: movq <zq3=int64#12,152(<workp=int64#1)
  3862. # asm 2: movq <zq3=%r14,152(<workp=%rdi)
  3863. movq %r14,152(%rdi)
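# the reduced zq0..zq3 have been written back to workp+128..152; the next
# multiplication forms the product of the stack temporaries t60..t63 and
# t70..t73, accumulating into xp0..xp3 and mulr4..mulr7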
  3864. # qhasm: mulr4 = 0
  3865. # asm 1: mov $0,>mulr4=int64#2
  3866. # asm 2: mov $0,>mulr4=%rsi
  3867. mov $0,%rsi
  3868. # qhasm: mulr5 = 0
  3869. # asm 1: mov $0,>mulr5=int64#4
  3870. # asm 2: mov $0,>mulr5=%rcx
  3871. mov $0,%rcx
  3872. # qhasm: mulr6 = 0
  3873. # asm 1: mov $0,>mulr6=int64#5
  3874. # asm 2: mov $0,>mulr6=%r8
  3875. mov $0,%r8
  3876. # qhasm: mulr7 = 0
  3877. # asm 1: mov $0,>mulr7=int64#6
  3878. # asm 2: mov $0,>mulr7=%r9
  3879. mov $0,%r9
  3880. # qhasm: mulx0 = t60_stack
  3881. # asm 1: movq <t60_stack=stack64#20,>mulx0=int64#8
  3882. # asm 2: movq <t60_stack=152(%rsp),>mulx0=%r10
  3883. movq 152(%rsp),%r10
  3884. # qhasm: mulrax = t70_stack
  3885. # asm 1: movq <t70_stack=stack64#16,>mulrax=int64#7
  3886. # asm 2: movq <t70_stack=120(%rsp),>mulrax=%rax
  3887. movq 120(%rsp),%rax
  3888. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  3889. # asm 1: mul <mulx0=int64#8
  3890. # asm 2: mul <mulx0=%r10
  3891. mul %r10
  3892. # qhasm: xp0 = mulrax
  3893. # asm 1: mov <mulrax=int64#7,>xp0=int64#9
  3894. # asm 2: mov <mulrax=%rax,>xp0=%r11
  3895. mov %rax,%r11
  3896. # qhasm: xp1 = mulrdx
  3897. # asm 1: mov <mulrdx=int64#3,>xp1=int64#10
  3898. # asm 2: mov <mulrdx=%rdx,>xp1=%r12
  3899. mov %rdx,%r12
  3900. # qhasm: mulrax = t71_stack
  3901. # asm 1: movq <t71_stack=stack64#17,>mulrax=int64#7
  3902. # asm 2: movq <t71_stack=128(%rsp),>mulrax=%rax
  3903. movq 128(%rsp),%rax
  3904. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  3905. # asm 1: mul <mulx0=int64#8
  3906. # asm 2: mul <mulx0=%r10
  3907. mul %r10
  3908. # qhasm: carry? xp1 += mulrax
  3909. # asm 1: add <mulrax=int64#7,<xp1=int64#10
  3910. # asm 2: add <mulrax=%rax,<xp1=%r12
  3911. add %rax,%r12
  3912. # qhasm: xp2 = 0
  3913. # asm 1: mov $0,>xp2=int64#11
  3914. # asm 2: mov $0,>xp2=%r13
  3915. mov $0,%r13
  3916. # qhasm: xp2 += mulrdx + carry
  3917. # asm 1: adc <mulrdx=int64#3,<xp2=int64#11
  3918. # asm 2: adc <mulrdx=%rdx,<xp2=%r13
  3919. adc %rdx,%r13
  3920. # qhasm: mulrax = t72_stack
  3921. # asm 1: movq <t72_stack=stack64#18,>mulrax=int64#7
  3922. # asm 2: movq <t72_stack=136(%rsp),>mulrax=%rax
  3923. movq 136(%rsp),%rax
  3924. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  3925. # asm 1: mul <mulx0=int64#8
  3926. # asm 2: mul <mulx0=%r10
  3927. mul %r10
  3928. # qhasm: carry? xp2 += mulrax
  3929. # asm 1: add <mulrax=int64#7,<xp2=int64#11
  3930. # asm 2: add <mulrax=%rax,<xp2=%r13
  3931. add %rax,%r13
  3932. # qhasm: xp3 = 0
  3933. # asm 1: mov $0,>xp3=int64#12
  3934. # asm 2: mov $0,>xp3=%r14
  3935. mov $0,%r14
  3936. # qhasm: xp3 += mulrdx + carry
  3937. # asm 1: adc <mulrdx=int64#3,<xp3=int64#12
  3938. # asm 2: adc <mulrdx=%rdx,<xp3=%r14
  3939. adc %rdx,%r14
  3940. # qhasm: mulrax = t73_stack
  3941. # asm 1: movq <t73_stack=stack64#19,>mulrax=int64#7
  3942. # asm 2: movq <t73_stack=144(%rsp),>mulrax=%rax
  3943. movq 144(%rsp),%rax
  3944. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  3945. # asm 1: mul <mulx0=int64#8
  3946. # asm 2: mul <mulx0=%r10
  3947. mul %r10
  3948. # qhasm: carry? xp3 += mulrax
  3949. # asm 1: add <mulrax=int64#7,<xp3=int64#12
  3950. # asm 2: add <mulrax=%rax,<xp3=%r14
  3951. add %rax,%r14
  3952. # qhasm: mulr4 += mulrdx + carry
  3953. # asm 1: adc <mulrdx=int64#3,<mulr4=int64#2
  3954. # asm 2: adc <mulrdx=%rdx,<mulr4=%rsi
  3955. adc %rdx,%rsi
  3956. # qhasm: mulx1 = t61_stack
  3957. # asm 1: movq <t61_stack=stack64#21,>mulx1=int64#8
  3958. # asm 2: movq <t61_stack=160(%rsp),>mulx1=%r10
  3959. movq 160(%rsp),%r10
  3960. # qhasm: mulrax = t70_stack
  3961. # asm 1: movq <t70_stack=stack64#16,>mulrax=int64#7
  3962. # asm 2: movq <t70_stack=120(%rsp),>mulrax=%rax
  3963. movq 120(%rsp),%rax
  3964. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  3965. # asm 1: mul <mulx1=int64#8
  3966. # asm 2: mul <mulx1=%r10
  3967. mul %r10
  3968. # qhasm: carry? xp1 += mulrax
  3969. # asm 1: add <mulrax=int64#7,<xp1=int64#10
  3970. # asm 2: add <mulrax=%rax,<xp1=%r12
  3971. add %rax,%r12
  3972. # qhasm: mulc = 0
  3973. # asm 1: mov $0,>mulc=int64#13
  3974. # asm 2: mov $0,>mulc=%r15
  3975. mov $0,%r15
  3976. # qhasm: mulc += mulrdx + carry
  3977. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  3978. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  3979. adc %rdx,%r15
  3980. # qhasm: mulrax = t71_stack
  3981. # asm 1: movq <t71_stack=stack64#17,>mulrax=int64#7
  3982. # asm 2: movq <t71_stack=128(%rsp),>mulrax=%rax
  3983. movq 128(%rsp),%rax
  3984. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  3985. # asm 1: mul <mulx1=int64#8
  3986. # asm 2: mul <mulx1=%r10
  3987. mul %r10
  3988. # qhasm: carry? xp2 += mulrax
  3989. # asm 1: add <mulrax=int64#7,<xp2=int64#11
  3990. # asm 2: add <mulrax=%rax,<xp2=%r13
  3991. add %rax,%r13
  3992. # qhasm: mulrdx += 0 + carry
  3993. # asm 1: adc $0,<mulrdx=int64#3
  3994. # asm 2: adc $0,<mulrdx=%rdx
  3995. adc $0,%rdx
  3996. # qhasm: carry? xp2 += mulc
  3997. # asm 1: add <mulc=int64#13,<xp2=int64#11
  3998. # asm 2: add <mulc=%r15,<xp2=%r13
  3999. add %r15,%r13
  4000. # qhasm: mulc = 0
  4001. # asm 1: mov $0,>mulc=int64#13
  4002. # asm 2: mov $0,>mulc=%r15
  4003. mov $0,%r15
  4004. # qhasm: mulc += mulrdx + carry
  4005. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  4006. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  4007. adc %rdx,%r15
  4008. # qhasm: mulrax = t72_stack
  4009. # asm 1: movq <t72_stack=stack64#18,>mulrax=int64#7
  4010. # asm 2: movq <t72_stack=136(%rsp),>mulrax=%rax
  4011. movq 136(%rsp),%rax
  4012. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  4013. # asm 1: mul <mulx1=int64#8
  4014. # asm 2: mul <mulx1=%r10
  4015. mul %r10
  4016. # qhasm: carry? xp3 += mulrax
  4017. # asm 1: add <mulrax=int64#7,<xp3=int64#12
  4018. # asm 2: add <mulrax=%rax,<xp3=%r14
  4019. add %rax,%r14
  4020. # qhasm: mulrdx += 0 + carry
  4021. # asm 1: adc $0,<mulrdx=int64#3
  4022. # asm 2: adc $0,<mulrdx=%rdx
  4023. adc $0,%rdx
  4024. # qhasm: carry? xp3 += mulc
  4025. # asm 1: add <mulc=int64#13,<xp3=int64#12
  4026. # asm 2: add <mulc=%r15,<xp3=%r14
  4027. add %r15,%r14
  4028. # qhasm: mulc = 0
  4029. # asm 1: mov $0,>mulc=int64#13
  4030. # asm 2: mov $0,>mulc=%r15
  4031. mov $0,%r15
  4032. # qhasm: mulc += mulrdx + carry
  4033. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  4034. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  4035. adc %rdx,%r15
  4036. # qhasm: mulrax = t73_stack
  4037. # asm 1: movq <t73_stack=stack64#19,>mulrax=int64#7
  4038. # asm 2: movq <t73_stack=144(%rsp),>mulrax=%rax
  4039. movq 144(%rsp),%rax
  4040. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  4041. # asm 1: mul <mulx1=int64#8
  4042. # asm 2: mul <mulx1=%r10
  4043. mul %r10
  4044. # qhasm: carry? mulr4 += mulrax
  4045. # asm 1: add <mulrax=int64#7,<mulr4=int64#2
  4046. # asm 2: add <mulrax=%rax,<mulr4=%rsi
  4047. add %rax,%rsi
  4048. # qhasm: mulrdx += 0 + carry
  4049. # asm 1: adc $0,<mulrdx=int64#3
  4050. # asm 2: adc $0,<mulrdx=%rdx
  4051. adc $0,%rdx
  4052. # qhasm: carry? mulr4 += mulc
  4053. # asm 1: add <mulc=int64#13,<mulr4=int64#2
  4054. # asm 2: add <mulc=%r15,<mulr4=%rsi
  4055. add %r15,%rsi
  4056. # qhasm: mulr5 += mulrdx + carry
  4057. # asm 1: adc <mulrdx=int64#3,<mulr5=int64#4
  4058. # asm 2: adc <mulrdx=%rdx,<mulr5=%rcx
  4059. adc %rdx,%rcx
  4060. # qhasm: mulx2 = t62_stack
  4061. # asm 1: movq <t62_stack=stack64#22,>mulx2=int64#8
  4062. # asm 2: movq <t62_stack=168(%rsp),>mulx2=%r10
  4063. movq 168(%rsp),%r10
  4064. # qhasm: mulrax = t70_stack
  4065. # asm 1: movq <t70_stack=stack64#16,>mulrax=int64#7
  4066. # asm 2: movq <t70_stack=120(%rsp),>mulrax=%rax
  4067. movq 120(%rsp),%rax
  4068. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  4069. # asm 1: mul <mulx2=int64#8
  4070. # asm 2: mul <mulx2=%r10
  4071. mul %r10
  4072. # qhasm: carry? xp2 += mulrax
  4073. # asm 1: add <mulrax=int64#7,<xp2=int64#11
  4074. # asm 2: add <mulrax=%rax,<xp2=%r13
  4075. add %rax,%r13
  4076. # qhasm: mulc = 0
  4077. # asm 1: mov $0,>mulc=int64#13
  4078. # asm 2: mov $0,>mulc=%r15
  4079. mov $0,%r15
  4080. # qhasm: mulc += mulrdx + carry
  4081. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  4082. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  4083. adc %rdx,%r15
  4084. # qhasm: mulrax = t71_stack
  4085. # asm 1: movq <t71_stack=stack64#17,>mulrax=int64#7
  4086. # asm 2: movq <t71_stack=128(%rsp),>mulrax=%rax
  4087. movq 128(%rsp),%rax
  4088. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  4089. # asm 1: mul <mulx2=int64#8
  4090. # asm 2: mul <mulx2=%r10
  4091. mul %r10
  4092. # qhasm: carry? xp3 += mulrax
  4093. # asm 1: add <mulrax=int64#7,<xp3=int64#12
  4094. # asm 2: add <mulrax=%rax,<xp3=%r14
  4095. add %rax,%r14
  4096. # qhasm: mulrdx += 0 + carry
  4097. # asm 1: adc $0,<mulrdx=int64#3
  4098. # asm 2: adc $0,<mulrdx=%rdx
  4099. adc $0,%rdx
  4100. # qhasm: carry? xp3 += mulc
  4101. # asm 1: add <mulc=int64#13,<xp3=int64#12
  4102. # asm 2: add <mulc=%r15,<xp3=%r14
  4103. add %r15,%r14
  4104. # qhasm: mulc = 0
  4105. # asm 1: mov $0,>mulc=int64#13
  4106. # asm 2: mov $0,>mulc=%r15
  4107. mov $0,%r15
  4108. # qhasm: mulc += mulrdx + carry
  4109. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  4110. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  4111. adc %rdx,%r15
  4112. # qhasm: mulrax = t72_stack
  4113. # asm 1: movq <t72_stack=stack64#18,>mulrax=int64#7
  4114. # asm 2: movq <t72_stack=136(%rsp),>mulrax=%rax
  4115. movq 136(%rsp),%rax
  4116. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  4117. # asm 1: mul <mulx2=int64#8
  4118. # asm 2: mul <mulx2=%r10
  4119. mul %r10
  4120. # qhasm: carry? mulr4 += mulrax
  4121. # asm 1: add <mulrax=int64#7,<mulr4=int64#2
  4122. # asm 2: add <mulrax=%rax,<mulr4=%rsi
  4123. add %rax,%rsi
  4124. # qhasm: mulrdx += 0 + carry
  4125. # asm 1: adc $0,<mulrdx=int64#3
  4126. # asm 2: adc $0,<mulrdx=%rdx
  4127. adc $0,%rdx
  4128. # qhasm: carry? mulr4 += mulc
  4129. # asm 1: add <mulc=int64#13,<mulr4=int64#2
  4130. # asm 2: add <mulc=%r15,<mulr4=%rsi
  4131. add %r15,%rsi
  4132. # qhasm: mulc = 0
  4133. # asm 1: mov $0,>mulc=int64#13
  4134. # asm 2: mov $0,>mulc=%r15
  4135. mov $0,%r15
  4136. # qhasm: mulc += mulrdx + carry
  4137. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  4138. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  4139. adc %rdx,%r15
  4140. # qhasm: mulrax = t73_stack
  4141. # asm 1: movq <t73_stack=stack64#19,>mulrax=int64#7
  4142. # asm 2: movq <t73_stack=144(%rsp),>mulrax=%rax
  4143. movq 144(%rsp),%rax
  4144. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  4145. # asm 1: mul <mulx2=int64#8
  4146. # asm 2: mul <mulx2=%r10
  4147. mul %r10
  4148. # qhasm: carry? mulr5 += mulrax
  4149. # asm 1: add <mulrax=int64#7,<mulr5=int64#4
  4150. # asm 2: add <mulrax=%rax,<mulr5=%rcx
  4151. add %rax,%rcx
  4152. # qhasm: mulrdx += 0 + carry
  4153. # asm 1: adc $0,<mulrdx=int64#3
  4154. # asm 2: adc $0,<mulrdx=%rdx
  4155. adc $0,%rdx
  4156. # qhasm: carry? mulr5 += mulc
  4157. # asm 1: add <mulc=int64#13,<mulr5=int64#4
  4158. # asm 2: add <mulc=%r15,<mulr5=%rcx
  4159. add %r15,%rcx
  4160. # qhasm: mulr6 += mulrdx + carry
  4161. # asm 1: adc <mulrdx=int64#3,<mulr6=int64#5
  4162. # asm 2: adc <mulrdx=%rdx,<mulr6=%r8
  4163. adc %rdx,%r8
  4164. # qhasm: mulx3 = t63_stack
  4165. # asm 1: movq <t63_stack=stack64#23,>mulx3=int64#8
  4166. # asm 2: movq <t63_stack=176(%rsp),>mulx3=%r10
  4167. movq 176(%rsp),%r10
  4168. # qhasm: mulrax = t70_stack
  4169. # asm 1: movq <t70_stack=stack64#16,>mulrax=int64#7
  4170. # asm 2: movq <t70_stack=120(%rsp),>mulrax=%rax
  4171. movq 120(%rsp),%rax
  4172. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  4173. # asm 1: mul <mulx3=int64#8
  4174. # asm 2: mul <mulx3=%r10
  4175. mul %r10
  4176. # qhasm: carry? xp3 += mulrax
  4177. # asm 1: add <mulrax=int64#7,<xp3=int64#12
  4178. # asm 2: add <mulrax=%rax,<xp3=%r14
  4179. add %rax,%r14
  4180. # qhasm: mulc = 0
  4181. # asm 1: mov $0,>mulc=int64#13
  4182. # asm 2: mov $0,>mulc=%r15
  4183. mov $0,%r15
  4184. # qhasm: mulc += mulrdx + carry
  4185. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  4186. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  4187. adc %rdx,%r15
  4188. # qhasm: mulrax = t71_stack
  4189. # asm 1: movq <t71_stack=stack64#17,>mulrax=int64#7
  4190. # asm 2: movq <t71_stack=128(%rsp),>mulrax=%rax
  4191. movq 128(%rsp),%rax
  4192. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  4193. # asm 1: mul <mulx3=int64#8
  4194. # asm 2: mul <mulx3=%r10
  4195. mul %r10
  4196. # qhasm: carry? mulr4 += mulrax
  4197. # asm 1: add <mulrax=int64#7,<mulr4=int64#2
  4198. # asm 2: add <mulrax=%rax,<mulr4=%rsi
  4199. add %rax,%rsi
  4200. # qhasm: mulrdx += 0 + carry
  4201. # asm 1: adc $0,<mulrdx=int64#3
  4202. # asm 2: adc $0,<mulrdx=%rdx
  4203. adc $0,%rdx
  4204. # qhasm: carry? mulr4 += mulc
  4205. # asm 1: add <mulc=int64#13,<mulr4=int64#2
  4206. # asm 2: add <mulc=%r15,<mulr4=%rsi
  4207. add %r15,%rsi
  4208. # qhasm: mulc = 0
  4209. # asm 1: mov $0,>mulc=int64#13
  4210. # asm 2: mov $0,>mulc=%r15
  4211. mov $0,%r15
  4212. # qhasm: mulc += mulrdx + carry
  4213. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  4214. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  4215. adc %rdx,%r15
  4216. # qhasm: mulrax = t72_stack
  4217. # asm 1: movq <t72_stack=stack64#18,>mulrax=int64#7
  4218. # asm 2: movq <t72_stack=136(%rsp),>mulrax=%rax
  4219. movq 136(%rsp),%rax
  4220. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  4221. # asm 1: mul <mulx3=int64#8
  4222. # asm 2: mul <mulx3=%r10
  4223. mul %r10
  4224. # qhasm: carry? mulr5 += mulrax
  4225. # asm 1: add <mulrax=int64#7,<mulr5=int64#4
  4226. # asm 2: add <mulrax=%rax,<mulr5=%rcx
  4227. add %rax,%rcx
  4228. # qhasm: mulrdx += 0 + carry
  4229. # asm 1: adc $0,<mulrdx=int64#3
  4230. # asm 2: adc $0,<mulrdx=%rdx
  4231. adc $0,%rdx
  4232. # qhasm: carry? mulr5 += mulc
  4233. # asm 1: add <mulc=int64#13,<mulr5=int64#4
  4234. # asm 2: add <mulc=%r15,<mulr5=%rcx
  4235. add %r15,%rcx
  4236. # qhasm: mulc = 0
  4237. # asm 1: mov $0,>mulc=int64#13
  4238. # asm 2: mov $0,>mulc=%r15
  4239. mov $0,%r15
  4240. # qhasm: mulc += mulrdx + carry
  4241. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  4242. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  4243. adc %rdx,%r15
  4244. # qhasm: mulrax = t73_stack
  4245. # asm 1: movq <t73_stack=stack64#19,>mulrax=int64#7
  4246. # asm 2: movq <t73_stack=144(%rsp),>mulrax=%rax
  4247. movq 144(%rsp),%rax
  4248. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  4249. # asm 1: mul <mulx3=int64#8
  4250. # asm 2: mul <mulx3=%r10
  4251. mul %r10
  4252. # qhasm: carry? mulr6 += mulrax
  4253. # asm 1: add <mulrax=int64#7,<mulr6=int64#5
  4254. # asm 2: add <mulrax=%rax,<mulr6=%r8
  4255. add %rax,%r8
  4256. # qhasm: mulrdx += 0 + carry
  4257. # asm 1: adc $0,<mulrdx=int64#3
  4258. # asm 2: adc $0,<mulrdx=%rdx
  4259. adc $0,%rdx
  4260. # qhasm: carry? mulr6 += mulc
  4261. # asm 1: add <mulc=int64#13,<mulr6=int64#5
  4262. # asm 2: add <mulc=%r15,<mulr6=%r8
  4263. add %r15,%r8
  4264. # qhasm: mulr7 += mulrdx + carry
  4265. # asm 1: adc <mulrdx=int64#3,<mulr7=int64#6
  4266. # asm 2: adc <mulrdx=%rdx,<mulr7=%r9
  4267. adc %rdx,%r9
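# product of t6 and t7 complete (low limbs xp0..xp3, high limbs
# mulr4..mulr7); the multiply-by-38 reduction below mirrors the previous one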
  4268. # qhasm: mulrax = mulr4
  4269. # asm 1: mov <mulr4=int64#2,>mulrax=int64#7
  4270. # asm 2: mov <mulr4=%rsi,>mulrax=%rax
  4271. mov %rsi,%rax
  4272. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  4273. mulq crypto_scalarmult_curve25519_amd64_64_38
  4274. # qhasm: mulr4 = mulrax
  4275. # asm 1: mov <mulrax=int64#7,>mulr4=int64#2
  4276. # asm 2: mov <mulrax=%rax,>mulr4=%rsi
  4277. mov %rax,%rsi
  4278. # qhasm: mulrax = mulr5
  4279. # asm 1: mov <mulr5=int64#4,>mulrax=int64#7
  4280. # asm 2: mov <mulr5=%rcx,>mulrax=%rax
  4281. mov %rcx,%rax
  4282. # qhasm: mulr5 = mulrdx
  4283. # asm 1: mov <mulrdx=int64#3,>mulr5=int64#4
  4284. # asm 2: mov <mulrdx=%rdx,>mulr5=%rcx
  4285. mov %rdx,%rcx
  4286. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  4287. mulq crypto_scalarmult_curve25519_amd64_64_38
  4288. # qhasm: carry? mulr5 += mulrax
  4289. # asm 1: add <mulrax=int64#7,<mulr5=int64#4
  4290. # asm 2: add <mulrax=%rax,<mulr5=%rcx
  4291. add %rax,%rcx
  4292. # qhasm: mulrax = mulr6
  4293. # asm 1: mov <mulr6=int64#5,>mulrax=int64#7
  4294. # asm 2: mov <mulr6=%r8,>mulrax=%rax
  4295. mov %r8,%rax
  4296. # qhasm: mulr6 = 0
  4297. # asm 1: mov $0,>mulr6=int64#5
  4298. # asm 2: mov $0,>mulr6=%r8
  4299. mov $0,%r8
  4300. # qhasm: mulr6 += mulrdx + carry
  4301. # asm 1: adc <mulrdx=int64#3,<mulr6=int64#5
  4302. # asm 2: adc <mulrdx=%rdx,<mulr6=%r8
  4303. adc %rdx,%r8
  4304. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  4305. mulq crypto_scalarmult_curve25519_amd64_64_38
  4306. # qhasm: carry? mulr6 += mulrax
  4307. # asm 1: add <mulrax=int64#7,<mulr6=int64#5
  4308. # asm 2: add <mulrax=%rax,<mulr6=%r8
  4309. add %rax,%r8
  4310. # qhasm: mulrax = mulr7
  4311. # asm 1: mov <mulr7=int64#6,>mulrax=int64#7
  4312. # asm 2: mov <mulr7=%r9,>mulrax=%rax
  4313. mov %r9,%rax
  4314. # qhasm: mulr7 = 0
  4315. # asm 1: mov $0,>mulr7=int64#6
  4316. # asm 2: mov $0,>mulr7=%r9
  4317. mov $0,%r9
  4318. # qhasm: mulr7 += mulrdx + carry
  4319. # asm 1: adc <mulrdx=int64#3,<mulr7=int64#6
  4320. # asm 2: adc <mulrdx=%rdx,<mulr7=%r9
  4321. adc %rdx,%r9
  4322. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
  4323. mulq crypto_scalarmult_curve25519_amd64_64_38
  4324. # qhasm: carry? mulr7 += mulrax
  4325. # asm 1: add <mulrax=int64#7,<mulr7=int64#6
  4326. # asm 2: add <mulrax=%rax,<mulr7=%r9
  4327. add %rax,%r9
  4328. # qhasm: mulr8 = 0
  4329. # asm 1: mov $0,>mulr8=int64#7
  4330. # asm 2: mov $0,>mulr8=%rax
  4331. mov $0,%rax
  4332. # qhasm: mulr8 += mulrdx + carry
  4333. # asm 1: adc <mulrdx=int64#3,<mulr8=int64#7
  4334. # asm 2: adc <mulrdx=%rdx,<mulr8=%rax
  4335. adc %rdx,%rax
  4336. # qhasm: carry? xp0 += mulr4
  4337. # asm 1: add <mulr4=int64#2,<xp0=int64#9
  4338. # asm 2: add <mulr4=%rsi,<xp0=%r11
  4339. add %rsi,%r11
  4340. # qhasm: carry? xp1 += mulr5 + carry
  4341. # asm 1: adc <mulr5=int64#4,<xp1=int64#10
  4342. # asm 2: adc <mulr5=%rcx,<xp1=%r12
  4343. adc %rcx,%r12
  4344. # qhasm: carry? xp2 += mulr6 + carry
  4345. # asm 1: adc <mulr6=int64#5,<xp2=int64#11
  4346. # asm 2: adc <mulr6=%r8,<xp2=%r13
  4347. adc %r8,%r13
  4348. # qhasm: carry? xp3 += mulr7 + carry
  4349. # asm 1: adc <mulr7=int64#6,<xp3=int64#12
  4350. # asm 2: adc <mulr7=%r9,<xp3=%r14
  4351. adc %r9,%r14
  4352. # qhasm: mulzero = 0
  4353. # asm 1: mov $0,>mulzero=int64#2
  4354. # asm 2: mov $0,>mulzero=%rsi
  4355. mov $0,%rsi
  4356. # qhasm: mulr8 += mulzero + carry
  4357. # asm 1: adc <mulzero=int64#2,<mulr8=int64#7
  4358. # asm 2: adc <mulzero=%rsi,<mulr8=%rax
  4359. adc %rsi,%rax
  4360. # qhasm: mulr8 *= 38
  4361. # asm 1: imulq $38,<mulr8=int64#7,>mulr8=int64#3
  4362. # asm 2: imulq $38,<mulr8=%rax,>mulr8=%rdx
  4363. imulq $38,%rax,%rdx
  4364. # qhasm: carry? xp0 += mulr8
  4365. # asm 1: add <mulr8=int64#3,<xp0=int64#9
  4366. # asm 2: add <mulr8=%rdx,<xp0=%r11
  4367. add %rdx,%r11
  4368. # qhasm: carry? xp1 += mulzero + carry
  4369. # asm 1: adc <mulzero=int64#2,<xp1=int64#10
  4370. # asm 2: adc <mulzero=%rsi,<xp1=%r12
  4371. adc %rsi,%r12
  4372. # qhasm: carry? xp2 += mulzero + carry
  4373. # asm 1: adc <mulzero=int64#2,<xp2=int64#11
  4374. # asm 2: adc <mulzero=%rsi,<xp2=%r13
  4375. adc %rsi,%r13
  4376. # qhasm: carry? xp3 += mulzero + carry
  4377. # asm 1: adc <mulzero=int64#2,<xp3=int64#12
  4378. # asm 2: adc <mulzero=%rsi,<xp3=%r14
  4379. adc %rsi,%r14
  4380. # qhasm: mulzero += mulzero + carry
  4381. # asm 1: adc <mulzero=int64#2,<mulzero=int64#2
  4382. # asm 2: adc <mulzero=%rsi,<mulzero=%rsi
  4383. adc %rsi,%rsi
  4384. # qhasm: mulzero *= 38
  4385. # asm 1: imulq $38,<mulzero=int64#2,>mulzero=int64#2
  4386. # asm 2: imulq $38,<mulzero=%rsi,>mulzero=%rsi
  4387. imulq $38,%rsi,%rsi
  4388. # qhasm: xp0 += mulzero
  4389. # asm 1: add <mulzero=int64#2,<xp0=int64#9
  4390. # asm 2: add <mulzero=%rsi,<xp0=%r11
  4391. add %rsi,%r11
  4392. # qhasm: *(uint64 *)(workp + 32) = xp0
  4393. # asm 1: movq <xp0=int64#9,32(<workp=int64#1)
  4394. # asm 2: movq <xp0=%r11,32(<workp=%rdi)
  4395. movq %r11,32(%rdi)
  4396. # qhasm: *(uint64 *)(workp + 40) = xp1
  4397. # asm 1: movq <xp1=int64#10,40(<workp=int64#1)
  4398. # asm 2: movq <xp1=%r12,40(<workp=%rdi)
  4399. movq %r12,40(%rdi)
  4400. # qhasm: *(uint64 *)(workp + 48) = xp2
  4401. # asm 1: movq <xp2=int64#11,48(<workp=int64#1)
  4402. # asm 2: movq <xp2=%r13,48(<workp=%rdi)
  4403. movq %r13,48(%rdi)
  4404. # qhasm: *(uint64 *)(workp + 56) = xp3
  4405. # asm 1: movq <xp3=int64#12,56(<workp=int64#1)
  4406. # asm 2: movq <xp3=%r14,56(<workp=%rdi)
  4407. movq %r14,56(%rdi)
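# the reduced product is stored as xp at workp+32..56; the block below
# multiplies the limbs t50..t53 by the constant
# crypto_scalarmult_curve25519_amd64_64_121666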
  4408. # qhasm: mul121666rax = t50_stack
  4409. # asm 1: movq <t50_stack=stack64#24,>mul121666rax=int64#7
  4410. # asm 2: movq <t50_stack=184(%rsp),>mul121666rax=%rax
  4411. movq 184(%rsp),%rax
  4412. # qhasm: (uint128) mul121666rdx mul121666rax = mul121666rax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_121666
  4413. mulq crypto_scalarmult_curve25519_amd64_64_121666
  4414. # qhasm: zp0 = mul121666rax
  4415. # asm 1: mov <mul121666rax=int64#7,>zp0=int64#2
  4416. # asm 2: mov <mul121666rax=%rax,>zp0=%rsi
  4417. mov %rax,%rsi
  4418. # qhasm: zp1 = mul121666rdx
  4419. # asm 1: mov <mul121666rdx=int64#3,>zp1=int64#4
  4420. # asm 2: mov <mul121666rdx=%rdx,>zp1=%rcx
  4421. mov %rdx,%rcx
  4422. # qhasm: mul121666rax = t52_stack
  4423. # asm 1: movq <t52_stack=stack64#26,>mul121666rax=int64#7
  4424. # asm 2: movq <t52_stack=200(%rsp),>mul121666rax=%rax
  4425. movq 200(%rsp),%rax
  4426. # qhasm: (uint128) mul121666rdx mul121666rax = mul121666rax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_121666
  4427. mulq crypto_scalarmult_curve25519_amd64_64_121666
  4428. # qhasm: zp2 = mul121666rax
  4429. # asm 1: mov <mul121666rax=int64#7,>zp2=int64#5
  4430. # asm 2: mov <mul121666rax=%rax,>zp2=%r8
  4431. mov %rax,%r8
  4432. # qhasm: zp3 = mul121666rdx
  4433. # asm 1: mov <mul121666rdx=int64#3,>zp3=int64#6
  4434. # asm 2: mov <mul121666rdx=%rdx,>zp3=%r9
  4435. mov %rdx,%r9
  4436. # qhasm: mul121666rax = t51_stack
  4437. # asm 1: movq <t51_stack=stack64#25,>mul121666rax=int64#7
  4438. # asm 2: movq <t51_stack=192(%rsp),>mul121666rax=%rax
  4439. movq 192(%rsp),%rax
  4440. # qhasm: (uint128) mul121666rdx mul121666rax = mul121666rax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_121666
  4441. mulq crypto_scalarmult_curve25519_amd64_64_121666
  4442. # qhasm: mul121666t1 = mul121666rax
  4443. # asm 1: mov <mul121666rax=int64#7,>mul121666t1=int64#8
  4444. # asm 2: mov <mul121666rax=%rax,>mul121666t1=%r10
  4445. mov %rax,%r10
  4446. # qhasm: mul121666t2 = mul121666rdx
  4447. # asm 1: mov <mul121666rdx=int64#3,>mul121666t2=int64#9
  4448. # asm 2: mov <mul121666rdx=%rdx,>mul121666t2=%r11
  4449. mov %rdx,%r11
  4450. # qhasm: mul121666rax = t53_stack
  4451. # asm 1: movq <t53_stack=stack64#27,>mul121666rax=int64#7
  4452. # asm 2: movq <t53_stack=208(%rsp),>mul121666rax=%rax
  4453. movq 208(%rsp),%rax
  4454. # qhasm: (uint128) mul121666rdx mul121666rax = mul121666rax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_121666
  4455. mulq crypto_scalarmult_curve25519_amd64_64_121666
  4456. # qhasm: mul121666t3 = mul121666rax
  4457. # asm 1: mov <mul121666rax=int64#7,>mul121666t3=int64#7
  4458. # asm 2: mov <mul121666rax=%rax,>mul121666t3=%rax
  4459. mov %rax,%rax
  4460. # qhasm: mul121666r4 = mul121666rdx
  4461. # asm 1: mov <mul121666rdx=int64#3,>mul121666r4=int64#3
  4462. # asm 2: mov <mul121666rdx=%rdx,>mul121666r4=%rdx
  4463. mov %rdx,%rdx
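# the two moves above are no-ops: qhasm placed mul121666t3 and mul121666r4
# in the registers that already hold mul121666rax and mul121666rdx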
  4464. # qhasm: carry? zp1 += mul121666t1
  4465. # asm 1: add <mul121666t1=int64#8,<zp1=int64#4
  4466. # asm 2: add <mul121666t1=%r10,<zp1=%rcx
  4467. add %r10,%rcx
  4468. # qhasm: carry? zp2 += mul121666t2 + carry
  4469. # asm 1: adc <mul121666t2=int64#9,<zp2=int64#5
  4470. # asm 2: adc <mul121666t2=%r11,<zp2=%r8
  4471. adc %r11,%r8
  4472. # qhasm: carry? zp3 += mul121666t3 + carry
  4473. # asm 1: adc <mul121666t3=int64#7,<zp3=int64#6
  4474. # asm 2: adc <mul121666t3=%rax,<zp3=%r9
  4475. adc %rax,%r9
  4476. # qhasm: mul121666r4 += 0 + carry
  4477. # asm 1: adc $0,<mul121666r4=int64#3
  4478. # asm 2: adc $0,<mul121666r4=%rdx
  4479. adc $0,%rdx
  4480. # qhasm: mul121666r4 *= 38
  4481. # asm 1: imulq $38,<mul121666r4=int64#3,>mul121666r4=int64#3
  4482. # asm 2: imulq $38,<mul121666r4=%rdx,>mul121666r4=%rdx
  4483. imulq $38,%rdx,%rdx
  4484. # qhasm: carry? zp0 += mul121666r4
  4485. # asm 1: add <mul121666r4=int64#3,<zp0=int64#2
  4486. # asm 2: add <mul121666r4=%rdx,<zp0=%rsi
  4487. add %rdx,%rsi
  4488. # qhasm: carry? zp1 += 0 + carry
  4489. # asm 1: adc $0,<zp1=int64#4
  4490. # asm 2: adc $0,<zp1=%rcx
  4491. adc $0,%rcx
  4492. # qhasm: carry? zp2 += 0 + carry
  4493. # asm 1: adc $0,<zp2=int64#5
  4494. # asm 2: adc $0,<zp2=%r8
  4495. adc $0,%r8
  4496. # qhasm: carry? zp3 += 0 + carry
  4497. # asm 1: adc $0,<zp3=int64#6
  4498. # asm 2: adc $0,<zp3=%r9
  4499. adc $0,%r9
  4500. # qhasm: mul121666t1 = 38
  4501. # asm 1: mov $38,>mul121666t1=int64#3
  4502. # asm 2: mov $38,>mul121666t1=%rdx
  4503. mov $38,%rdx
  4504. # qhasm: mul121666t2 = 0
  4505. # asm 1: mov $0,>mul121666t2=int64#7
  4506. # asm 2: mov $0,>mul121666t2=%rax
  4507. mov $0,%rax
  4508. # qhasm: mul121666t1 = mul121666t2 if !carry
  4509. # asm 1: cmovae <mul121666t2=int64#7,<mul121666t1=int64#3
  4510. # asm 2: cmovae <mul121666t2=%rax,<mul121666t1=%rdx
  4511. cmovae %rax,%rdx
  4512. # qhasm: zp0 += mul121666t1
  4513. # asm 1: add <mul121666t1=int64#3,<zp0=int64#2
  4514. # asm 2: add <mul121666t1=%rdx,<zp0=%rsi
  4515. add %rdx,%rsi
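# zp0..zp3 now hold t5 * 121666 folded into four limbs mod 2^255-19 (the
# fifth limb was scaled by 38, and the final carry handled by the
# conditional +38 just above); t70..t73 are added next with the same
# conditional-38 carry handling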
  4516. # qhasm: carry? zp0 += t70_stack
  4517. # asm 1: addq <t70_stack=stack64#16,<zp0=int64#2
  4518. # asm 2: addq <t70_stack=120(%rsp),<zp0=%rsi
  4519. addq 120(%rsp),%rsi
  4520. # qhasm: carry? zp1 += t71_stack + carry
  4521. # asm 1: adcq <t71_stack=stack64#17,<zp1=int64#4
  4522. # asm 2: adcq <t71_stack=128(%rsp),<zp1=%rcx
  4523. adcq 128(%rsp),%rcx
  4524. # qhasm: carry? zp2 += t72_stack + carry
  4525. # asm 1: adcq <t72_stack=stack64#18,<zp2=int64#5
  4526. # asm 2: adcq <t72_stack=136(%rsp),<zp2=%r8
  4527. adcq 136(%rsp),%r8
  4528. # qhasm: carry? zp3 += t73_stack + carry
  4529. # asm 1: adcq <t73_stack=stack64#19,<zp3=int64#6
  4530. # asm 2: adcq <t73_stack=144(%rsp),<zp3=%r9
  4531. adcq 144(%rsp),%r9
  4532. # qhasm: addt0 = 0
  4533. # asm 1: mov $0,>addt0=int64#3
  4534. # asm 2: mov $0,>addt0=%rdx
  4535. mov $0,%rdx
  4536. # qhasm: addt1 = 38
  4537. # asm 1: mov $38,>addt1=int64#7
  4538. # asm 2: mov $38,>addt1=%rax
  4539. mov $38,%rax
  4540. # qhasm: addt1 = addt0 if !carry
  4541. # asm 1: cmovae <addt0=int64#3,<addt1=int64#7
  4542. # asm 2: cmovae <addt0=%rdx,<addt1=%rax
  4543. cmovae %rdx,%rax
  4544. # qhasm: carry? zp0 += addt1
  4545. # asm 1: add <addt1=int64#7,<zp0=int64#2
  4546. # asm 2: add <addt1=%rax,<zp0=%rsi
  4547. add %rax,%rsi
  4548. # qhasm: carry? zp1 += addt0 + carry
  4549. # asm 1: adc <addt0=int64#3,<zp1=int64#4
  4550. # asm 2: adc <addt0=%rdx,<zp1=%rcx
  4551. adc %rdx,%rcx
  4552. # qhasm: carry? zp2 += addt0 + carry
  4553. # asm 1: adc <addt0=int64#3,<zp2=int64#5
  4554. # asm 2: adc <addt0=%rdx,<zp2=%r8
  4555. adc %rdx,%r8
  4556. # qhasm: carry? zp3 += addt0 + carry
  4557. # asm 1: adc <addt0=int64#3,<zp3=int64#6
  4558. # asm 2: adc <addt0=%rdx,<zp3=%r9
  4559. adc %rdx,%r9
  4560. # qhasm: addt0 = addt1 if carry
  4561. # asm 1: cmovc <addt1=int64#7,<addt0=int64#3
  4562. # asm 2: cmovc <addt1=%rax,<addt0=%rdx
  4563. cmovc %rax,%rdx
  4564. # qhasm: zp0 += addt0
  4565. # asm 1: add <addt0=int64#3,<zp0=int64#2
  4566. # asm 2: add <addt0=%rdx,<zp0=%rsi
  4567. add %rdx,%rsi
  4568. # qhasm: *(uint64 *)(workp + 64) = zp0
  4569. # asm 1: movq <zp0=int64#2,64(<workp=int64#1)
  4570. # asm 2: movq <zp0=%rsi,64(<workp=%rdi)
  4571. movq %rsi,64(%rdi)
  4572. # qhasm: *(uint64 *)(workp + 72) = zp1
  4573. # asm 1: movq <zp1=int64#4,72(<workp=int64#1)
  4574. # asm 2: movq <zp1=%rcx,72(<workp=%rdi)
  4575. movq %rcx,72(%rdi)
  4576. # qhasm: *(uint64 *)(workp + 80) = zp2
  4577. # asm 1: movq <zp2=int64#5,80(<workp=int64#1)
  4578. # asm 2: movq <zp2=%r8,80(<workp=%rdi)
  4579. movq %r8,80(%rdi)
  4580. # qhasm: *(uint64 *)(workp + 88) = zp3
  4581. # asm 1: movq <zp3=int64#6,88(<workp=int64#1)
  4582. # asm 2: movq <zp3=%r9,88(<workp=%rdi)
  4583. movq %r9,88(%rdi)
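# zp = 121666*t5 + t7 has been stored at workp+64..88; the last
# multiplication of the ladder step multiplies it by t50..t53 and writes
# the reduced result back to the same four words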
  4584. # qhasm: mulr4 = 0
  4585. # asm 1: mov $0,>mulr4=int64#2
  4586. # asm 2: mov $0,>mulr4=%rsi
  4587. mov $0,%rsi
  4588. # qhasm: mulr5 = 0
  4589. # asm 1: mov $0,>mulr5=int64#4
  4590. # asm 2: mov $0,>mulr5=%rcx
  4591. mov $0,%rcx
  4592. # qhasm: mulr6 = 0
  4593. # asm 1: mov $0,>mulr6=int64#5
  4594. # asm 2: mov $0,>mulr6=%r8
  4595. mov $0,%r8
  4596. # qhasm: mulr7 = 0
  4597. # asm 1: mov $0,>mulr7=int64#6
  4598. # asm 2: mov $0,>mulr7=%r9
  4599. mov $0,%r9
  4600. # qhasm: mulx0 = *(uint64 *)(workp + 64)
  4601. # asm 1: movq 64(<workp=int64#1),>mulx0=int64#8
  4602. # asm 2: movq 64(<workp=%rdi),>mulx0=%r10
  4603. movq 64(%rdi),%r10
  4604. # qhasm: mulrax = t50_stack
  4605. # asm 1: movq <t50_stack=stack64#24,>mulrax=int64#7
  4606. # asm 2: movq <t50_stack=184(%rsp),>mulrax=%rax
  4607. movq 184(%rsp),%rax
  4608. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  4609. # asm 1: mul <mulx0=int64#8
  4610. # asm 2: mul <mulx0=%r10
  4611. mul %r10
  4612. # qhasm: zp0 = mulrax
  4613. # asm 1: mov <mulrax=int64#7,>zp0=int64#9
  4614. # asm 2: mov <mulrax=%rax,>zp0=%r11
  4615. mov %rax,%r11
  4616. # qhasm: zp1 = mulrdx
  4617. # asm 1: mov <mulrdx=int64#3,>zp1=int64#10
  4618. # asm 2: mov <mulrdx=%rdx,>zp1=%r12
  4619. mov %rdx,%r12
  4620. # qhasm: mulrax = t51_stack
  4621. # asm 1: movq <t51_stack=stack64#25,>mulrax=int64#7
  4622. # asm 2: movq <t51_stack=192(%rsp),>mulrax=%rax
  4623. movq 192(%rsp),%rax
  4624. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  4625. # asm 1: mul <mulx0=int64#8
  4626. # asm 2: mul <mulx0=%r10
  4627. mul %r10
  4628. # qhasm: carry? zp1 += mulrax
  4629. # asm 1: add <mulrax=int64#7,<zp1=int64#10
  4630. # asm 2: add <mulrax=%rax,<zp1=%r12
  4631. add %rax,%r12
  4632. # qhasm: zp2 = 0
  4633. # asm 1: mov $0,>zp2=int64#11
  4634. # asm 2: mov $0,>zp2=%r13
  4635. mov $0,%r13
  4636. # qhasm: zp2 += mulrdx + carry
  4637. # asm 1: adc <mulrdx=int64#3,<zp2=int64#11
  4638. # asm 2: adc <mulrdx=%rdx,<zp2=%r13
  4639. adc %rdx,%r13
  4640. # qhasm: mulrax = t52_stack
  4641. # asm 1: movq <t52_stack=stack64#26,>mulrax=int64#7
  4642. # asm 2: movq <t52_stack=200(%rsp),>mulrax=%rax
  4643. movq 200(%rsp),%rax
  4644. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  4645. # asm 1: mul <mulx0=int64#8
  4646. # asm 2: mul <mulx0=%r10
  4647. mul %r10
  4648. # qhasm: carry? zp2 += mulrax
  4649. # asm 1: add <mulrax=int64#7,<zp2=int64#11
  4650. # asm 2: add <mulrax=%rax,<zp2=%r13
  4651. add %rax,%r13
  4652. # qhasm: zp3 = 0
  4653. # asm 1: mov $0,>zp3=int64#12
  4654. # asm 2: mov $0,>zp3=%r14
  4655. mov $0,%r14
  4656. # qhasm: zp3 += mulrdx + carry
  4657. # asm 1: adc <mulrdx=int64#3,<zp3=int64#12
  4658. # asm 2: adc <mulrdx=%rdx,<zp3=%r14
  4659. adc %rdx,%r14
  4660. # qhasm: mulrax = t53_stack
  4661. # asm 1: movq <t53_stack=stack64#27,>mulrax=int64#7
  4662. # asm 2: movq <t53_stack=208(%rsp),>mulrax=%rax
  4663. movq 208(%rsp),%rax
  4664. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx0
  4665. # asm 1: mul <mulx0=int64#8
  4666. # asm 2: mul <mulx0=%r10
  4667. mul %r10
  4668. # qhasm: carry? zp3 += mulrax
  4669. # asm 1: add <mulrax=int64#7,<zp3=int64#12
  4670. # asm 2: add <mulrax=%rax,<zp3=%r14
  4671. add %rax,%r14
  4672. # qhasm: mulr4 += mulrdx + carry
  4673. # asm 1: adc <mulrdx=int64#3,<mulr4=int64#2
  4674. # asm 2: adc <mulrdx=%rdx,<mulr4=%rsi
  4675. adc %rdx,%rsi
  4676. # qhasm: mulx1 = *(uint64 *)(workp + 72)
  4677. # asm 1: movq 72(<workp=int64#1),>mulx1=int64#8
  4678. # asm 2: movq 72(<workp=%rdi),>mulx1=%r10
  4679. movq 72(%rdi),%r10
  4680. # qhasm: mulrax = t50_stack
  4681. # asm 1: movq <t50_stack=stack64#24,>mulrax=int64#7
  4682. # asm 2: movq <t50_stack=184(%rsp),>mulrax=%rax
  4683. movq 184(%rsp),%rax
  4684. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  4685. # asm 1: mul <mulx1=int64#8
  4686. # asm 2: mul <mulx1=%r10
  4687. mul %r10
  4688. # qhasm: carry? zp1 += mulrax
  4689. # asm 1: add <mulrax=int64#7,<zp1=int64#10
  4690. # asm 2: add <mulrax=%rax,<zp1=%r12
  4691. add %rax,%r12
  4692. # qhasm: mulc = 0
  4693. # asm 1: mov $0,>mulc=int64#13
  4694. # asm 2: mov $0,>mulc=%r15
  4695. mov $0,%r15
  4696. # qhasm: mulc += mulrdx + carry
  4697. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  4698. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  4699. adc %rdx,%r15
  4700. # qhasm: mulrax = t51_stack
  4701. # asm 1: movq <t51_stack=stack64#25,>mulrax=int64#7
  4702. # asm 2: movq <t51_stack=192(%rsp),>mulrax=%rax
  4703. movq 192(%rsp),%rax
  4704. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  4705. # asm 1: mul <mulx1=int64#8
  4706. # asm 2: mul <mulx1=%r10
  4707. mul %r10
  4708. # qhasm: carry? zp2 += mulrax
  4709. # asm 1: add <mulrax=int64#7,<zp2=int64#11
  4710. # asm 2: add <mulrax=%rax,<zp2=%r13
  4711. add %rax,%r13
  4712. # qhasm: mulrdx += 0 + carry
  4713. # asm 1: adc $0,<mulrdx=int64#3
  4714. # asm 2: adc $0,<mulrdx=%rdx
  4715. adc $0,%rdx
  4716. # qhasm: carry? zp2 += mulc
  4717. # asm 1: add <mulc=int64#13,<zp2=int64#11
  4718. # asm 2: add <mulc=%r15,<zp2=%r13
  4719. add %r15,%r13
  4720. # qhasm: mulc = 0
  4721. # asm 1: mov $0,>mulc=int64#13
  4722. # asm 2: mov $0,>mulc=%r15
  4723. mov $0,%r15
  4724. # qhasm: mulc += mulrdx + carry
  4725. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  4726. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  4727. adc %rdx,%r15
  4728. # qhasm: mulrax = t52_stack
  4729. # asm 1: movq <t52_stack=stack64#26,>mulrax=int64#7
  4730. # asm 2: movq <t52_stack=200(%rsp),>mulrax=%rax
  4731. movq 200(%rsp),%rax
  4732. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  4733. # asm 1: mul <mulx1=int64#8
  4734. # asm 2: mul <mulx1=%r10
  4735. mul %r10
  4736. # qhasm: carry? zp3 += mulrax
  4737. # asm 1: add <mulrax=int64#7,<zp3=int64#12
  4738. # asm 2: add <mulrax=%rax,<zp3=%r14
  4739. add %rax,%r14
  4740. # qhasm: mulrdx += 0 + carry
  4741. # asm 1: adc $0,<mulrdx=int64#3
  4742. # asm 2: adc $0,<mulrdx=%rdx
  4743. adc $0,%rdx
  4744. # qhasm: carry? zp3 += mulc
  4745. # asm 1: add <mulc=int64#13,<zp3=int64#12
  4746. # asm 2: add <mulc=%r15,<zp3=%r14
  4747. add %r15,%r14
  4748. # qhasm: mulc = 0
  4749. # asm 1: mov $0,>mulc=int64#13
  4750. # asm 2: mov $0,>mulc=%r15
  4751. mov $0,%r15
  4752. # qhasm: mulc += mulrdx + carry
  4753. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  4754. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  4755. adc %rdx,%r15
  4756. # qhasm: mulrax = t53_stack
  4757. # asm 1: movq <t53_stack=stack64#27,>mulrax=int64#7
  4758. # asm 2: movq <t53_stack=208(%rsp),>mulrax=%rax
  4759. movq 208(%rsp),%rax
  4760. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx1
  4761. # asm 1: mul <mulx1=int64#8
  4762. # asm 2: mul <mulx1=%r10
  4763. mul %r10
  4764. # qhasm: carry? mulr4 += mulrax
  4765. # asm 1: add <mulrax=int64#7,<mulr4=int64#2
  4766. # asm 2: add <mulrax=%rax,<mulr4=%rsi
  4767. add %rax,%rsi
  4768. # qhasm: mulrdx += 0 + carry
  4769. # asm 1: adc $0,<mulrdx=int64#3
  4770. # asm 2: adc $0,<mulrdx=%rdx
  4771. adc $0,%rdx
  4772. # qhasm: carry? mulr4 += mulc
  4773. # asm 1: add <mulc=int64#13,<mulr4=int64#2
  4774. # asm 2: add <mulc=%r15,<mulr4=%rsi
  4775. add %r15,%rsi
  4776. # qhasm: mulr5 += mulrdx + carry
  4777. # asm 1: adc <mulrdx=int64#3,<mulr5=int64#4
  4778. # asm 2: adc <mulrdx=%rdx,<mulr5=%rcx
  4779. adc %rdx,%rcx
  4780. # qhasm: mulx2 = *(uint64 *)(workp + 80)
  4781. # asm 1: movq 80(<workp=int64#1),>mulx2=int64#8
  4782. # asm 2: movq 80(<workp=%rdi),>mulx2=%r10
  4783. movq 80(%rdi),%r10
  4784. # qhasm: mulrax = t50_stack
  4785. # asm 1: movq <t50_stack=stack64#24,>mulrax=int64#7
  4786. # asm 2: movq <t50_stack=184(%rsp),>mulrax=%rax
  4787. movq 184(%rsp),%rax
  4788. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  4789. # asm 1: mul <mulx2=int64#8
  4790. # asm 2: mul <mulx2=%r10
  4791. mul %r10
  4792. # qhasm: carry? zp2 += mulrax
  4793. # asm 1: add <mulrax=int64#7,<zp2=int64#11
  4794. # asm 2: add <mulrax=%rax,<zp2=%r13
  4795. add %rax,%r13
  4796. # qhasm: mulc = 0
  4797. # asm 1: mov $0,>mulc=int64#13
  4798. # asm 2: mov $0,>mulc=%r15
  4799. mov $0,%r15
  4800. # qhasm: mulc += mulrdx + carry
  4801. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  4802. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  4803. adc %rdx,%r15
  4804. # qhasm: mulrax = t51_stack
  4805. # asm 1: movq <t51_stack=stack64#25,>mulrax=int64#7
  4806. # asm 2: movq <t51_stack=192(%rsp),>mulrax=%rax
  4807. movq 192(%rsp),%rax
  4808. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  4809. # asm 1: mul <mulx2=int64#8
  4810. # asm 2: mul <mulx2=%r10
  4811. mul %r10
  4812. # qhasm: carry? zp3 += mulrax
  4813. # asm 1: add <mulrax=int64#7,<zp3=int64#12
  4814. # asm 2: add <mulrax=%rax,<zp3=%r14
  4815. add %rax,%r14
  4816. # qhasm: mulrdx += 0 + carry
  4817. # asm 1: adc $0,<mulrdx=int64#3
  4818. # asm 2: adc $0,<mulrdx=%rdx
  4819. adc $0,%rdx
  4820. # qhasm: carry? zp3 += mulc
  4821. # asm 1: add <mulc=int64#13,<zp3=int64#12
  4822. # asm 2: add <mulc=%r15,<zp3=%r14
  4823. add %r15,%r14
  4824. # qhasm: mulc = 0
  4825. # asm 1: mov $0,>mulc=int64#13
  4826. # asm 2: mov $0,>mulc=%r15
  4827. mov $0,%r15
  4828. # qhasm: mulc += mulrdx + carry
  4829. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  4830. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  4831. adc %rdx,%r15
  4832. # qhasm: mulrax = t52_stack
  4833. # asm 1: movq <t52_stack=stack64#26,>mulrax=int64#7
  4834. # asm 2: movq <t52_stack=200(%rsp),>mulrax=%rax
  4835. movq 200(%rsp),%rax
  4836. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  4837. # asm 1: mul <mulx2=int64#8
  4838. # asm 2: mul <mulx2=%r10
  4839. mul %r10
  4840. # qhasm: carry? mulr4 += mulrax
  4841. # asm 1: add <mulrax=int64#7,<mulr4=int64#2
  4842. # asm 2: add <mulrax=%rax,<mulr4=%rsi
  4843. add %rax,%rsi
  4844. # qhasm: mulrdx += 0 + carry
  4845. # asm 1: adc $0,<mulrdx=int64#3
  4846. # asm 2: adc $0,<mulrdx=%rdx
  4847. adc $0,%rdx
  4848. # qhasm: carry? mulr4 += mulc
  4849. # asm 1: add <mulc=int64#13,<mulr4=int64#2
  4850. # asm 2: add <mulc=%r15,<mulr4=%rsi
  4851. add %r15,%rsi
  4852. # qhasm: mulc = 0
  4853. # asm 1: mov $0,>mulc=int64#13
  4854. # asm 2: mov $0,>mulc=%r15
  4855. mov $0,%r15
  4856. # qhasm: mulc += mulrdx + carry
  4857. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  4858. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  4859. adc %rdx,%r15
  4860. # qhasm: mulrax = t53_stack
  4861. # asm 1: movq <t53_stack=stack64#27,>mulrax=int64#7
  4862. # asm 2: movq <t53_stack=208(%rsp),>mulrax=%rax
  4863. movq 208(%rsp),%rax
  4864. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx2
  4865. # asm 1: mul <mulx2=int64#8
  4866. # asm 2: mul <mulx2=%r10
  4867. mul %r10
  4868. # qhasm: carry? mulr5 += mulrax
  4869. # asm 1: add <mulrax=int64#7,<mulr5=int64#4
  4870. # asm 2: add <mulrax=%rax,<mulr5=%rcx
  4871. add %rax,%rcx
  4872. # qhasm: mulrdx += 0 + carry
  4873. # asm 1: adc $0,<mulrdx=int64#3
  4874. # asm 2: adc $0,<mulrdx=%rdx
  4875. adc $0,%rdx
  4876. # qhasm: carry? mulr5 += mulc
  4877. # asm 1: add <mulc=int64#13,<mulr5=int64#4
  4878. # asm 2: add <mulc=%r15,<mulr5=%rcx
  4879. add %r15,%rcx
  4880. # qhasm: mulr6 += mulrdx + carry
  4881. # asm 1: adc <mulrdx=int64#3,<mulr6=int64#5
  4882. # asm 2: adc <mulrdx=%rdx,<mulr6=%r8
  4883. adc %rdx,%r8
  4884. # qhasm: mulx3 = *(uint64 *)(workp + 88)
  4885. # asm 1: movq 88(<workp=int64#1),>mulx3=int64#8
  4886. # asm 2: movq 88(<workp=%rdi),>mulx3=%r10
  4887. movq 88(%rdi),%r10
  4888. # qhasm: mulrax = t50_stack
  4889. # asm 1: movq <t50_stack=stack64#24,>mulrax=int64#7
  4890. # asm 2: movq <t50_stack=184(%rsp),>mulrax=%rax
  4891. movq 184(%rsp),%rax
  4892. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  4893. # asm 1: mul <mulx3=int64#8
  4894. # asm 2: mul <mulx3=%r10
  4895. mul %r10
  4896. # qhasm: carry? zp3 += mulrax
  4897. # asm 1: add <mulrax=int64#7,<zp3=int64#12
  4898. # asm 2: add <mulrax=%rax,<zp3=%r14
  4899. add %rax,%r14
  4900. # qhasm: mulc = 0
  4901. # asm 1: mov $0,>mulc=int64#13
  4902. # asm 2: mov $0,>mulc=%r15
  4903. mov $0,%r15
  4904. # qhasm: mulc += mulrdx + carry
  4905. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  4906. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  4907. adc %rdx,%r15
  4908. # qhasm: mulrax = t51_stack
  4909. # asm 1: movq <t51_stack=stack64#25,>mulrax=int64#7
  4910. # asm 2: movq <t51_stack=192(%rsp),>mulrax=%rax
  4911. movq 192(%rsp),%rax
  4912. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  4913. # asm 1: mul <mulx3=int64#8
  4914. # asm 2: mul <mulx3=%r10
  4915. mul %r10
  4916. # qhasm: carry? mulr4 += mulrax
  4917. # asm 1: add <mulrax=int64#7,<mulr4=int64#2
  4918. # asm 2: add <mulrax=%rax,<mulr4=%rsi
  4919. add %rax,%rsi
  4920. # qhasm: mulrdx += 0 + carry
  4921. # asm 1: adc $0,<mulrdx=int64#3
  4922. # asm 2: adc $0,<mulrdx=%rdx
  4923. adc $0,%rdx
  4924. # qhasm: carry? mulr4 += mulc
  4925. # asm 1: add <mulc=int64#13,<mulr4=int64#2
  4926. # asm 2: add <mulc=%r15,<mulr4=%rsi
  4927. add %r15,%rsi
  4928. # qhasm: mulc = 0
  4929. # asm 1: mov $0,>mulc=int64#13
  4930. # asm 2: mov $0,>mulc=%r15
  4931. mov $0,%r15
  4932. # qhasm: mulc += mulrdx + carry
  4933. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  4934. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  4935. adc %rdx,%r15
  4936. # qhasm: mulrax = t52_stack
  4937. # asm 1: movq <t52_stack=stack64#26,>mulrax=int64#7
  4938. # asm 2: movq <t52_stack=200(%rsp),>mulrax=%rax
  4939. movq 200(%rsp),%rax
  4940. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  4941. # asm 1: mul <mulx3=int64#8
  4942. # asm 2: mul <mulx3=%r10
  4943. mul %r10
  4944. # qhasm: carry? mulr5 += mulrax
  4945. # asm 1: add <mulrax=int64#7,<mulr5=int64#4
  4946. # asm 2: add <mulrax=%rax,<mulr5=%rcx
  4947. add %rax,%rcx
  4948. # qhasm: mulrdx += 0 + carry
  4949. # asm 1: adc $0,<mulrdx=int64#3
  4950. # asm 2: adc $0,<mulrdx=%rdx
  4951. adc $0,%rdx
  4952. # qhasm: carry? mulr5 += mulc
  4953. # asm 1: add <mulc=int64#13,<mulr5=int64#4
  4954. # asm 2: add <mulc=%r15,<mulr5=%rcx
  4955. add %r15,%rcx
  4956. # qhasm: mulc = 0
  4957. # asm 1: mov $0,>mulc=int64#13
  4958. # asm 2: mov $0,>mulc=%r15
  4959. mov $0,%r15
  4960. # qhasm: mulc += mulrdx + carry
  4961. # asm 1: adc <mulrdx=int64#3,<mulc=int64#13
  4962. # asm 2: adc <mulrdx=%rdx,<mulc=%r15
  4963. adc %rdx,%r15
  4964. # qhasm: mulrax = t53_stack
  4965. # asm 1: movq <t53_stack=stack64#27,>mulrax=int64#7
  4966. # asm 2: movq <t53_stack=208(%rsp),>mulrax=%rax
  4967. movq 208(%rsp),%rax
  4968. # qhasm: (uint128) mulrdx mulrax = mulrax * mulx3
  4969. # asm 1: mul <mulx3=int64#8
  4970. # asm 2: mul <mulx3=%r10
  4971. mul %r10
  4972. # qhasm: carry? mulr6 += mulrax
  4973. # asm 1: add <mulrax=int64#7,<mulr6=int64#5
  4974. # asm 2: add <mulrax=%rax,<mulr6=%r8
  4975. add %rax,%r8
  4976. # qhasm: mulrdx += 0 + carry
  4977. # asm 1: adc $0,<mulrdx=int64#3
  4978. # asm 2: adc $0,<mulrdx=%rdx
  4979. adc $0,%rdx
  4980. # qhasm: carry? mulr6 += mulc
  4981. # asm 1: add <mulc=int64#13,<mulr6=int64#5
  4982. # asm 2: add <mulc=%r15,<mulr6=%r8
  4983. add %r15,%r8
  4984. # qhasm: mulr7 += mulrdx + carry
  4985. # asm 1: adc <mulrdx=int64#3,<mulr7=int64#6
  4986. # asm 2: adc <mulrdx=%rdx,<mulr7=%r9
  4987. adc %rdx,%r9
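# reduce as before: mulr4..mulr7 are scaled by 38 and folded into zp0..zp3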
# qhasm: mulrax = mulr4
# asm 1: mov <mulr4=int64#2,>mulrax=int64#7
# asm 2: mov <mulr4=%rsi,>mulrax=%rax
mov %rsi,%rax
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
mulq crypto_scalarmult_curve25519_amd64_64_38
# qhasm: mulr4 = mulrax
# asm 1: mov <mulrax=int64#7,>mulr4=int64#2
# asm 2: mov <mulrax=%rax,>mulr4=%rsi
mov %rax,%rsi
# qhasm: mulrax = mulr5
# asm 1: mov <mulr5=int64#4,>mulrax=int64#7
# asm 2: mov <mulr5=%rcx,>mulrax=%rax
mov %rcx,%rax
# qhasm: mulr5 = mulrdx
# asm 1: mov <mulrdx=int64#3,>mulr5=int64#4
# asm 2: mov <mulrdx=%rdx,>mulr5=%rcx
mov %rdx,%rcx
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
mulq crypto_scalarmult_curve25519_amd64_64_38
# qhasm: carry? mulr5 += mulrax
# asm 1: add <mulrax=int64#7,<mulr5=int64#4
# asm 2: add <mulrax=%rax,<mulr5=%rcx
add %rax,%rcx
# qhasm: mulrax = mulr6
# asm 1: mov <mulr6=int64#5,>mulrax=int64#7
# asm 2: mov <mulr6=%r8,>mulrax=%rax
mov %r8,%rax
# qhasm: mulr6 = 0
# asm 1: mov $0,>mulr6=int64#5
# asm 2: mov $0,>mulr6=%r8
mov $0,%r8
# qhasm: mulr6 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr6=int64#5
# asm 2: adc <mulrdx=%rdx,<mulr6=%r8
adc %rdx,%r8
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
mulq crypto_scalarmult_curve25519_amd64_64_38
# qhasm: carry? mulr6 += mulrax
# asm 1: add <mulrax=int64#7,<mulr6=int64#5
# asm 2: add <mulrax=%rax,<mulr6=%r8
add %rax,%r8
# qhasm: mulrax = mulr7
# asm 1: mov <mulr7=int64#6,>mulrax=int64#7
# asm 2: mov <mulr7=%r9,>mulrax=%rax
mov %r9,%rax
# qhasm: mulr7 = 0
# asm 1: mov $0,>mulr7=int64#6
# asm 2: mov $0,>mulr7=%r9
mov $0,%r9
# qhasm: mulr7 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr7=int64#6
# asm 2: adc <mulrdx=%rdx,<mulr7=%r9
adc %rdx,%r9
# qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)&crypto_scalarmult_curve25519_amd64_64_38
mulq crypto_scalarmult_curve25519_amd64_64_38
# qhasm: carry? mulr7 += mulrax
# asm 1: add <mulrax=int64#7,<mulr7=int64#6
# asm 2: add <mulrax=%rax,<mulr7=%r9
add %rax,%r9
# qhasm: mulr8 = 0
# asm 1: mov $0,>mulr8=int64#7
# asm 2: mov $0,>mulr8=%rax
mov $0,%rax
# qhasm: mulr8 += mulrdx + carry
# asm 1: adc <mulrdx=int64#3,<mulr8=int64#7
# asm 2: adc <mulrdx=%rdx,<mulr8=%rax
adc %rdx,%rax
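# note: mulr4..mulr7 plus the carry word mulr8 now hold 38 times the old
# upper half; the next four additions fold it into the low words zp0..zp3.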
# qhasm: carry? zp0 += mulr4
# asm 1: add <mulr4=int64#2,<zp0=int64#9
# asm 2: add <mulr4=%rsi,<zp0=%r11
add %rsi,%r11
# qhasm: carry? zp1 += mulr5 + carry
# asm 1: adc <mulr5=int64#4,<zp1=int64#10
# asm 2: adc <mulr5=%rcx,<zp1=%r12
adc %rcx,%r12
# qhasm: carry? zp2 += mulr6 + carry
# asm 1: adc <mulr6=int64#5,<zp2=int64#11
# asm 2: adc <mulr6=%r8,<zp2=%r13
adc %r8,%r13
# qhasm: carry? zp3 += mulr7 + carry
# asm 1: adc <mulr7=int64#6,<zp3=int64#12
# asm 2: adc <mulr7=%r9,<zp3=%r14
adc %r9,%r14
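# note: the carry out of zp3 is absorbed into mulr8, which is reduced once
# more (times 38) and added back into zp0.  A last possible carry is caught
# in mulzero and folded the same way, so the result fits in four 64-bit
# words (it may still be only partially reduced below 2^255 - 19).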
# qhasm: mulzero = 0
# asm 1: mov $0,>mulzero=int64#2
# asm 2: mov $0,>mulzero=%rsi
mov $0,%rsi
# qhasm: mulr8 += mulzero + carry
# asm 1: adc <mulzero=int64#2,<mulr8=int64#7
# asm 2: adc <mulzero=%rsi,<mulr8=%rax
adc %rsi,%rax
# qhasm: mulr8 *= 38
# asm 1: imulq $38,<mulr8=int64#7,>mulr8=int64#3
# asm 2: imulq $38,<mulr8=%rax,>mulr8=%rdx
imulq $38,%rax,%rdx
# qhasm: carry? zp0 += mulr8
# asm 1: add <mulr8=int64#3,<zp0=int64#9
# asm 2: add <mulr8=%rdx,<zp0=%r11
add %rdx,%r11
# qhasm: carry? zp1 += mulzero + carry
# asm 1: adc <mulzero=int64#2,<zp1=int64#10
# asm 2: adc <mulzero=%rsi,<zp1=%r12
adc %rsi,%r12
# qhasm: carry? zp2 += mulzero + carry
# asm 1: adc <mulzero=int64#2,<zp2=int64#11
# asm 2: adc <mulzero=%rsi,<zp2=%r13
adc %rsi,%r13
# qhasm: carry? zp3 += mulzero + carry
# asm 1: adc <mulzero=int64#2,<zp3=int64#12
# asm 2: adc <mulzero=%rsi,<zp3=%r14
adc %rsi,%r14
# qhasm: mulzero += mulzero + carry
# asm 1: adc <mulzero=int64#2,<mulzero=int64#2
# asm 2: adc <mulzero=%rsi,<mulzero=%rsi
adc %rsi,%rsi
# qhasm: mulzero *= 38
# asm 1: imulq $38,<mulzero=int64#2,>mulzero=int64#2
# asm 2: imulq $38,<mulzero=%rsi,>mulzero=%rsi
imulq $38,%rsi,%rsi
# qhasm: zp0 += mulzero
# asm 1: add <mulzero=int64#2,<zp0=int64#9
# asm 2: add <mulzero=%rsi,<zp0=%r11
add %rsi,%r11
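# note: store the reduced product zp0..zp3 to the work array at byte
# offsets 64, 72, 80 and 88 of workp.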
# qhasm: *(uint64 *)(workp + 64) = zp0
# asm 1: movq <zp0=int64#9,64(<workp=int64#1)
# asm 2: movq <zp0=%r11,64(<workp=%rdi)
movq %r11,64(%rdi)
# qhasm: *(uint64 *)(workp + 72) = zp1
# asm 1: movq <zp1=int64#10,72(<workp=int64#1)
# asm 2: movq <zp1=%r12,72(<workp=%rdi)
movq %r12,72(%rdi)
# qhasm: *(uint64 *)(workp + 80) = zp2
# asm 1: movq <zp2=int64#11,80(<workp=int64#1)
# asm 2: movq <zp2=%r13,80(<workp=%rdi)
movq %r13,80(%rdi)
# qhasm: *(uint64 *)(workp + 88) = zp3
# asm 1: movq <zp3=int64#12,88(<workp=int64#1)
# asm 2: movq <zp3=%r14,88(<workp=%rdi)
movq %r14,88(%rdi)
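# note: reload the values spilled on entry.  caller1_stack appears to hold
# the stack-frame adjustment chosen in the prologue, since it is added back
# to %rsp in the leave sequence below; caller2..caller7 restore the
# callee-saved registers %r12..%r15, %rbx and %rbp.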
# qhasm: caller1 = caller1_stack
# asm 1: movq <caller1_stack=stack64#1,>caller1=int64#9
# asm 2: movq <caller1_stack=0(%rsp),>caller1=%r11
movq 0(%rsp),%r11
# qhasm: caller2 = caller2_stack
# asm 1: movq <caller2_stack=stack64#2,>caller2=int64#10
# asm 2: movq <caller2_stack=8(%rsp),>caller2=%r12
movq 8(%rsp),%r12
# qhasm: caller3 = caller3_stack
# asm 1: movq <caller3_stack=stack64#3,>caller3=int64#11
# asm 2: movq <caller3_stack=16(%rsp),>caller3=%r13
movq 16(%rsp),%r13
# qhasm: caller4 = caller4_stack
# asm 1: movq <caller4_stack=stack64#4,>caller4=int64#12
# asm 2: movq <caller4_stack=24(%rsp),>caller4=%r14
movq 24(%rsp),%r14
# qhasm: caller5 = caller5_stack
# asm 1: movq <caller5_stack=stack64#5,>caller5=int64#13
# asm 2: movq <caller5_stack=32(%rsp),>caller5=%r15
movq 32(%rsp),%r15
# qhasm: caller6 = caller6_stack
# asm 1: movq <caller6_stack=stack64#6,>caller6=int64#14
# asm 2: movq <caller6_stack=40(%rsp),>caller6=%rbx
movq 40(%rsp),%rbx
# qhasm: caller7 = caller7_stack
# asm 1: movq <caller7_stack=stack64#7,>caller7=int64#15
# asm 2: movq <caller7_stack=48(%rsp),>caller7=%rbp
movq 48(%rsp),%rbp
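# note: release the stack frame and return; the moves into %rax and %rdx
# look like qhasm's standard return-register epilogue and carry no
# meaningful result here.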
# qhasm: leave
add %r11,%rsp
mov %rdi,%rax
mov %rsi,%rdx
ret