# ladderstep.s

# qhasm: int64 workp
# qhasm: input workp
# qhasm: int64 caller1
# qhasm: int64 caller2
# qhasm: int64 caller3
# qhasm: int64 caller4
# qhasm: int64 caller5
# qhasm: int64 caller6
# qhasm: int64 caller7
# qhasm: caller caller1
# qhasm: caller caller2
# qhasm: caller caller3
# qhasm: caller caller4
# qhasm: caller caller5
# qhasm: caller caller6
# qhasm: caller caller7
# qhasm: stack64 caller1_stack
# qhasm: stack64 caller2_stack
# qhasm: stack64 caller3_stack
# qhasm: stack64 caller4_stack
# qhasm: stack64 caller5_stack
# qhasm: stack64 caller6_stack
# qhasm: stack64 caller7_stack
# qhasm: int64 t10
# qhasm: int64 t11
# qhasm: int64 t12
# qhasm: int64 t13
# qhasm: int64 t14
# qhasm: stack64 t10_stack
# qhasm: stack64 t11_stack
# qhasm: stack64 t12_stack
# qhasm: stack64 t13_stack
# qhasm: stack64 t14_stack
# qhasm: int64 t20
# qhasm: int64 t21
# qhasm: int64 t22
# qhasm: int64 t23
# qhasm: int64 t24
# qhasm: stack64 t20_stack
# qhasm: stack64 t21_stack
# qhasm: stack64 t22_stack
# qhasm: stack64 t23_stack
# qhasm: stack64 t24_stack
# qhasm: int64 t30
# qhasm: int64 t31
# qhasm: int64 t32
# qhasm: int64 t33
# qhasm: int64 t34
# qhasm: stack64 t30_stack
# qhasm: stack64 t31_stack
# qhasm: stack64 t32_stack
# qhasm: stack64 t33_stack
# qhasm: stack64 t34_stack
# qhasm: int64 t40
# qhasm: int64 t41
# qhasm: int64 t42
# qhasm: int64 t43
# qhasm: int64 t44
# qhasm: stack64 t40_stack
# qhasm: stack64 t41_stack
# qhasm: stack64 t42_stack
# qhasm: stack64 t43_stack
# qhasm: stack64 t44_stack
# qhasm: int64 t50
# qhasm: int64 t51
# qhasm: int64 t52
# qhasm: int64 t53
# qhasm: int64 t54
# qhasm: stack64 t50_stack
# qhasm: stack64 t51_stack
# qhasm: stack64 t52_stack
# qhasm: stack64 t53_stack
# qhasm: stack64 t54_stack
# qhasm: int64 t60
# qhasm: int64 t61
# qhasm: int64 t62
# qhasm: int64 t63
# qhasm: int64 t64
# qhasm: stack64 t60_stack
# qhasm: stack64 t61_stack
# qhasm: stack64 t62_stack
# qhasm: stack64 t63_stack
# qhasm: stack64 t64_stack
# qhasm: int64 t70
# qhasm: int64 t71
# qhasm: int64 t72
# qhasm: int64 t73
# qhasm: int64 t74
# qhasm: stack64 t70_stack
# qhasm: stack64 t71_stack
# qhasm: stack64 t72_stack
# qhasm: stack64 t73_stack
# qhasm: stack64 t74_stack
# qhasm: int64 t80
# qhasm: int64 t81
# qhasm: int64 t82
# qhasm: int64 t83
# qhasm: int64 t84
# qhasm: stack64 t80_stack
# qhasm: stack64 t81_stack
# qhasm: stack64 t82_stack
# qhasm: stack64 t83_stack
# qhasm: stack64 t84_stack
# qhasm: int64 t90
# qhasm: int64 t91
# qhasm: int64 t92
# qhasm: int64 t93
# qhasm: int64 t94
# qhasm: stack64 t90_stack
# qhasm: stack64 t91_stack
# qhasm: stack64 t92_stack
# qhasm: stack64 t93_stack
# qhasm: stack64 t94_stack
# qhasm: int64 xp0
# qhasm: int64 xp1
# qhasm: int64 xp2
# qhasm: int64 xp3
# qhasm: int64 xp4
# qhasm: int64 zp0
# qhasm: int64 zp1
# qhasm: int64 zp2
# qhasm: int64 zp3
# qhasm: int64 zp4
# qhasm: int64 xq0
# qhasm: int64 xq1
# qhasm: int64 xq2
# qhasm: int64 xq3
# qhasm: int64 xq4
# qhasm: int64 zq0
# qhasm: int64 zq1
# qhasm: int64 zq2
# qhasm: int64 zq3
# qhasm: int64 zq4
# qhasm: int64 mulr01
# qhasm: int64 mulr11
# qhasm: int64 mulr21
# qhasm: int64 mulr31
# qhasm: int64 mulr41
# qhasm: int64 mulrax
# qhasm: int64 mulrdx
# qhasm: int64 mult
# qhasm: int64 mulredmask
# qhasm: stack64 mulx219_stack
# qhasm: stack64 mulx319_stack
# qhasm: stack64 mulx419_stack
# qhasm: int64 squarer01
# qhasm: int64 squarer11
# qhasm: int64 squarer21
# qhasm: int64 squarer31
# qhasm: int64 squarer41
# qhasm: int64 squarerax
# qhasm: int64 squarerdx
# qhasm: int64 squaret
# qhasm: int64 squareredmask
# qhasm: int64 mul121666rax
# qhasm: int64 mul121666rdx
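# The t10..t94 groups above are temporaries for field elements of
# GF(2^255 - 19) in radix-2^51 form: a value f is held in five 64-bit
# limbs as f = f0 + 2^51*f1 + 2^102*f2 + 2^153*f3 + 2^204*f4, with each
# limb normally a little below 2^52 so a few additions fit in 64 bits
# without carries.  As a rough sketch (illustrative C, not part of this
# source):
#
#     typedef struct { uint64_t v[5]; } fe51;   /* one field element */
#
# workp points at the ladder state, stored as consecutive 40-byte fe51
# values; the loads below read the element at offset 40 and combine it
# with the element at offset 80.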
# qhasm: enter crypto_scalarmult_curve25519_amd64_51_ladderstep
.text
.p2align 5
.globl _crypto_scalarmult_curve25519_amd64_51_ladderstep
.globl crypto_scalarmult_curve25519_amd64_51_ladderstep
_crypto_scalarmult_curve25519_amd64_51_ladderstep:
crypto_scalarmult_curve25519_amd64_51_ladderstep:
mov %rsp,%r11
and $31,%r11
add $352,%r11
sub %r11,%rsp
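# The four instructions above reserve the scratch frame: %r11 is set to
# (%rsp mod 32) + 352, so after the subtraction %rsp is 32-byte aligned
# (352 = 11*32) with at least 352 bytes for the stack64 spill slots.
# Roughly (illustrative):
#
#     adjust = (rsp & 31) + 352;
#     rsp   -= adjust;
#
# The adjustment amount kept in %r11 is stored at 0(%rsp) just below (as
# caller1_stack), which allows the epilogue to add it back and restore
# the caller's stack pointer.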
# qhasm: caller1_stack = caller1
# asm 1: movq <caller1=int64#9,>caller1_stack=stack64#1
# asm 2: movq <caller1=%r11,>caller1_stack=0(%rsp)
movq %r11,0(%rsp)
# qhasm: caller2_stack = caller2
# asm 1: movq <caller2=int64#10,>caller2_stack=stack64#2
# asm 2: movq <caller2=%r12,>caller2_stack=8(%rsp)
movq %r12,8(%rsp)
# qhasm: caller3_stack = caller3
# asm 1: movq <caller3=int64#11,>caller3_stack=stack64#3
# asm 2: movq <caller3=%r13,>caller3_stack=16(%rsp)
movq %r13,16(%rsp)
# qhasm: caller4_stack = caller4
# asm 1: movq <caller4=int64#12,>caller4_stack=stack64#4
# asm 2: movq <caller4=%r14,>caller4_stack=24(%rsp)
movq %r14,24(%rsp)
# qhasm: caller5_stack = caller5
# asm 1: movq <caller5=int64#13,>caller5_stack=stack64#5
# asm 2: movq <caller5=%r15,>caller5_stack=32(%rsp)
movq %r15,32(%rsp)
# qhasm: caller6_stack = caller6
# asm 1: movq <caller6=int64#14,>caller6_stack=stack64#6
# asm 2: movq <caller6=%rbx,>caller6_stack=40(%rsp)
movq %rbx,40(%rsp)
# qhasm: caller7_stack = caller7
# asm 1: movq <caller7=int64#15,>caller7_stack=stack64#7
# asm 2: movq <caller7=%rbp,>caller7_stack=48(%rsp)
movq %rbp,48(%rsp)
# qhasm: t10 = *(uint64 *)(workp + 40)
# asm 1: movq 40(<workp=int64#1),>t10=int64#2
# asm 2: movq 40(<workp=%rdi),>t10=%rsi
movq 40(%rdi),%rsi
# qhasm: t11 = *(uint64 *)(workp + 48)
# asm 1: movq 48(<workp=int64#1),>t11=int64#3
# asm 2: movq 48(<workp=%rdi),>t11=%rdx
movq 48(%rdi),%rdx
# qhasm: t12 = *(uint64 *)(workp + 56)
# asm 1: movq 56(<workp=int64#1),>t12=int64#4
# asm 2: movq 56(<workp=%rdi),>t12=%rcx
movq 56(%rdi),%rcx
# qhasm: t13 = *(uint64 *)(workp + 64)
# asm 1: movq 64(<workp=int64#1),>t13=int64#5
# asm 2: movq 64(<workp=%rdi),>t13=%r8
movq 64(%rdi),%r8
# qhasm: t14 = *(uint64 *)(workp + 72)
# asm 1: movq 72(<workp=int64#1),>t14=int64#6
# asm 2: movq 72(<workp=%rdi),>t14=%r9
movq 72(%rdi),%r9
# qhasm: t20 = t10
# asm 1: mov <t10=int64#2,>t20=int64#7
# asm 2: mov <t10=%rsi,>t20=%rax
mov %rsi,%rax
# qhasm: t21 = t11
# asm 1: mov <t11=int64#3,>t21=int64#8
# asm 2: mov <t11=%rdx,>t21=%r10
mov %rdx,%r10
# qhasm: t22 = t12
# asm 1: mov <t12=int64#4,>t22=int64#9
# asm 2: mov <t12=%rcx,>t22=%r11
mov %rcx,%r11
# qhasm: t23 = t13
# asm 1: mov <t13=int64#5,>t23=int64#10
# asm 2: mov <t13=%r8,>t23=%r12
mov %r8,%r12
# qhasm: t24 = t14
# asm 1: mov <t14=int64#6,>t24=int64#11
# asm 2: mov <t14=%r9,>t24=%r13
mov %r9,%r13
# qhasm: t20 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P0
# asm 1: add crypto_scalarmult_curve25519_amd64_51_2P0,<t20=int64#7
# asm 2: add crypto_scalarmult_curve25519_amd64_51_2P0,<t20=%rax
add crypto_scalarmult_curve25519_amd64_51_2P0,%rax
# qhasm: t21 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P1234
# asm 1: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t21=int64#8
# asm 2: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t21=%r10
add crypto_scalarmult_curve25519_amd64_51_2P1234,%r10
# qhasm: t22 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P1234
# asm 1: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t22=int64#9
# asm 2: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t22=%r11
add crypto_scalarmult_curve25519_amd64_51_2P1234,%r11
# qhasm: t23 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P1234
# asm 1: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t23=int64#10
# asm 2: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t23=%r12
add crypto_scalarmult_curve25519_amd64_51_2P1234,%r12
# qhasm: t24 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P1234
# asm 1: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t24=int64#11
# asm 2: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t24=%r13
add crypto_scalarmult_curve25519_amd64_51_2P1234,%r13
# qhasm: t10 += *(uint64 *)(workp + 80)
# asm 1: addq 80(<workp=int64#1),<t10=int64#2
# asm 2: addq 80(<workp=%rdi),<t10=%rsi
addq 80(%rdi),%rsi
# qhasm: t11 += *(uint64 *)(workp + 88)
# asm 1: addq 88(<workp=int64#1),<t11=int64#3
# asm 2: addq 88(<workp=%rdi),<t11=%rdx
addq 88(%rdi),%rdx
# qhasm: t12 += *(uint64 *)(workp + 96)
# asm 1: addq 96(<workp=int64#1),<t12=int64#4
# asm 2: addq 96(<workp=%rdi),<t12=%rcx
addq 96(%rdi),%rcx
# qhasm: t13 += *(uint64 *)(workp + 104)
# asm 1: addq 104(<workp=int64#1),<t13=int64#5
# asm 2: addq 104(<workp=%rdi),<t13=%r8
addq 104(%rdi),%r8
# qhasm: t14 += *(uint64 *)(workp + 112)
# asm 1: addq 112(<workp=int64#1),<t14=int64#6
# asm 2: addq 112(<workp=%rdi),<t14=%r9
addq 112(%rdi),%r9
# qhasm: t20 -= *(uint64 *)(workp + 80)
# asm 1: subq 80(<workp=int64#1),<t20=int64#7
# asm 2: subq 80(<workp=%rdi),<t20=%rax
subq 80(%rdi),%rax
# qhasm: t21 -= *(uint64 *)(workp + 88)
# asm 1: subq 88(<workp=int64#1),<t21=int64#8
# asm 2: subq 88(<workp=%rdi),<t21=%r10
subq 88(%rdi),%r10
# qhasm: t22 -= *(uint64 *)(workp + 96)
# asm 1: subq 96(<workp=int64#1),<t22=int64#9
# asm 2: subq 96(<workp=%rdi),<t22=%r11
subq 96(%rdi),%r11
# qhasm: t23 -= *(uint64 *)(workp + 104)
# asm 1: subq 104(<workp=int64#1),<t23=int64#10
# asm 2: subq 104(<workp=%rdi),<t23=%r12
subq 104(%rdi),%r12
# qhasm: t24 -= *(uint64 *)(workp + 112)
# asm 1: subq 112(<workp=int64#1),<t24=int64#11
# asm 2: subq 112(<workp=%rdi),<t24=%r13
subq 112(%rdi),%r13
# qhasm: t10_stack = t10
# asm 1: movq <t10=int64#2,>t10_stack=stack64#8
# asm 2: movq <t10=%rsi,>t10_stack=56(%rsp)
movq %rsi,56(%rsp)
# qhasm: t11_stack = t11
# asm 1: movq <t11=int64#3,>t11_stack=stack64#9
# asm 2: movq <t11=%rdx,>t11_stack=64(%rsp)
movq %rdx,64(%rsp)
# qhasm: t12_stack = t12
# asm 1: movq <t12=int64#4,>t12_stack=stack64#10
# asm 2: movq <t12=%rcx,>t12_stack=72(%rsp)
movq %rcx,72(%rsp)
# qhasm: t13_stack = t13
# asm 1: movq <t13=int64#5,>t13_stack=stack64#11
# asm 2: movq <t13=%r8,>t13_stack=80(%rsp)
movq %r8,80(%rsp)
# qhasm: t14_stack = t14
# asm 1: movq <t14=int64#6,>t14_stack=stack64#12
# asm 2: movq <t14=%r9,>t14_stack=88(%rsp)
movq %r9,88(%rsp)
# qhasm: t20_stack = t20
# asm 1: movq <t20=int64#7,>t20_stack=stack64#13
# asm 2: movq <t20=%rax,>t20_stack=96(%rsp)
movq %rax,96(%rsp)
# qhasm: t21_stack = t21
# asm 1: movq <t21=int64#8,>t21_stack=stack64#14
# asm 2: movq <t21=%r10,>t21_stack=104(%rsp)
movq %r10,104(%rsp)
# qhasm: t22_stack = t22
# asm 1: movq <t22=int64#9,>t22_stack=stack64#15
# asm 2: movq <t22=%r11,>t22_stack=112(%rsp)
movq %r11,112(%rsp)
# qhasm: t23_stack = t23
# asm 1: movq <t23=int64#10,>t23_stack=stack64#16
# asm 2: movq <t23=%r12,>t23_stack=120(%rsp)
movq %r12,120(%rsp)
# qhasm: t24_stack = t24
# asm 1: movq <t24=int64#11,>t24_stack=stack64#17
# asm 2: movq <t24=%r13,>t24_stack=128(%rsp)
movq %r13,128(%rsp)
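# The block above builds two radix-2^51 field elements and spills them:
#     t1 (t10..t14) = element at workp+40  +  element at workp+80
#     t2 (t20..t24) = element at workp+40  -  element at workp+80  + 2p
# Adding the limbs of 2p (the 2P0 / 2P1234 constants) before subtracting
# keeps every limb nonnegative, so no borrow handling is needed, and the
# result stays congruent to the difference modulo p = 2^255 - 19.  This
# is the usual X+Z / X-Z pair at the start of a Montgomery ladder step.
# Limb-wise sketch (illustrative C, two_p[] holding the limbs of 2p):
#
#     for (i = 0; i < 5; i++) {
#         t1.v[i] = x.v[i] + z.v[i];
#         t2.v[i] = x.v[i] + two_p[i] - z.v[i];
#     }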
# qhasm: squarerax = t20_stack
# asm 1: movq <t20_stack=stack64#13,>squarerax=int64#7
# asm 2: movq <t20_stack=96(%rsp),>squarerax=%rax
movq 96(%rsp),%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t20_stack
# asm 1: mulq <t20_stack=stack64#13
# asm 2: mulq <t20_stack=96(%rsp)
mulq 96(%rsp)
# qhasm: t70 = squarerax
# asm 1: mov <squarerax=int64#7,>t70=int64#2
# asm 2: mov <squarerax=%rax,>t70=%rsi
mov %rax,%rsi
# qhasm: squarer01 = squarerdx
# asm 1: mov <squarerdx=int64#3,>squarer01=int64#4
# asm 2: mov <squarerdx=%rdx,>squarer01=%rcx
mov %rdx,%rcx
# qhasm: squarerax = t20_stack
# asm 1: movq <t20_stack=stack64#13,>squarerax=int64#7
# asm 2: movq <t20_stack=96(%rsp),>squarerax=%rax
movq 96(%rsp),%rax
# qhasm: squarerax <<= 1
# asm 1: shl $1,<squarerax=int64#7
# asm 2: shl $1,<squarerax=%rax
shl $1,%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t21_stack
# asm 1: mulq <t21_stack=stack64#14
# asm 2: mulq <t21_stack=104(%rsp)
mulq 104(%rsp)
# qhasm: t71 = squarerax
# asm 1: mov <squarerax=int64#7,>t71=int64#5
# asm 2: mov <squarerax=%rax,>t71=%r8
mov %rax,%r8
# qhasm: squarer11 = squarerdx
# asm 1: mov <squarerdx=int64#3,>squarer11=int64#6
# asm 2: mov <squarerdx=%rdx,>squarer11=%r9
mov %rdx,%r9
# qhasm: squarerax = t20_stack
# asm 1: movq <t20_stack=stack64#13,>squarerax=int64#7
# asm 2: movq <t20_stack=96(%rsp),>squarerax=%rax
movq 96(%rsp),%rax
# qhasm: squarerax <<= 1
# asm 1: shl $1,<squarerax=int64#7
# asm 2: shl $1,<squarerax=%rax
shl $1,%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t22_stack
# asm 1: mulq <t22_stack=stack64#15
# asm 2: mulq <t22_stack=112(%rsp)
mulq 112(%rsp)
# qhasm: t72 = squarerax
# asm 1: mov <squarerax=int64#7,>t72=int64#8
# asm 2: mov <squarerax=%rax,>t72=%r10
mov %rax,%r10
# qhasm: squarer21 = squarerdx
# asm 1: mov <squarerdx=int64#3,>squarer21=int64#9
# asm 2: mov <squarerdx=%rdx,>squarer21=%r11
mov %rdx,%r11
# qhasm: squarerax = t20_stack
# asm 1: movq <t20_stack=stack64#13,>squarerax=int64#7
# asm 2: movq <t20_stack=96(%rsp),>squarerax=%rax
movq 96(%rsp),%rax
# qhasm: squarerax <<= 1
# asm 1: shl $1,<squarerax=int64#7
# asm 2: shl $1,<squarerax=%rax
shl $1,%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t23_stack
# asm 1: mulq <t23_stack=stack64#16
# asm 2: mulq <t23_stack=120(%rsp)
mulq 120(%rsp)
# qhasm: t73 = squarerax
# asm 1: mov <squarerax=int64#7,>t73=int64#10
# asm 2: mov <squarerax=%rax,>t73=%r12
mov %rax,%r12
# qhasm: squarer31 = squarerdx
# asm 1: mov <squarerdx=int64#3,>squarer31=int64#11
# asm 2: mov <squarerdx=%rdx,>squarer31=%r13
mov %rdx,%r13
# qhasm: squarerax = t20_stack
# asm 1: movq <t20_stack=stack64#13,>squarerax=int64#7
# asm 2: movq <t20_stack=96(%rsp),>squarerax=%rax
movq 96(%rsp),%rax
# qhasm: squarerax <<= 1
# asm 1: shl $1,<squarerax=int64#7
# asm 2: shl $1,<squarerax=%rax
shl $1,%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t24_stack
# asm 1: mulq <t24_stack=stack64#17
# asm 2: mulq <t24_stack=128(%rsp)
mulq 128(%rsp)
# qhasm: t74 = squarerax
# asm 1: mov <squarerax=int64#7,>t74=int64#12
# asm 2: mov <squarerax=%rax,>t74=%r14
mov %rax,%r14
# qhasm: squarer41 = squarerdx
# asm 1: mov <squarerdx=int64#3,>squarer41=int64#13
# asm 2: mov <squarerdx=%rdx,>squarer41=%r15
mov %rdx,%r15
# qhasm: squarerax = t21_stack
# asm 1: movq <t21_stack=stack64#14,>squarerax=int64#7
# asm 2: movq <t21_stack=104(%rsp),>squarerax=%rax
movq 104(%rsp),%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t21_stack
# asm 1: mulq <t21_stack=stack64#14
# asm 2: mulq <t21_stack=104(%rsp)
mulq 104(%rsp)
# qhasm: carry? t72 += squarerax
# asm 1: add <squarerax=int64#7,<t72=int64#8
# asm 2: add <squarerax=%rax,<t72=%r10
add %rax,%r10
# qhasm: squarer21 += squarerdx + carry
# asm 1: adc <squarerdx=int64#3,<squarer21=int64#9
# asm 2: adc <squarerdx=%rdx,<squarer21=%r11
adc %rdx,%r11
# qhasm: squarerax = t21_stack
# asm 1: movq <t21_stack=stack64#14,>squarerax=int64#7
# asm 2: movq <t21_stack=104(%rsp),>squarerax=%rax
movq 104(%rsp),%rax
# qhasm: squarerax <<= 1
# asm 1: shl $1,<squarerax=int64#7
# asm 2: shl $1,<squarerax=%rax
shl $1,%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t22_stack
# asm 1: mulq <t22_stack=stack64#15
# asm 2: mulq <t22_stack=112(%rsp)
mulq 112(%rsp)
# qhasm: carry? t73 += squarerax
# asm 1: add <squarerax=int64#7,<t73=int64#10
# asm 2: add <squarerax=%rax,<t73=%r12
add %rax,%r12
# qhasm: squarer31 += squarerdx + carry
# asm 1: adc <squarerdx=int64#3,<squarer31=int64#11
# asm 2: adc <squarerdx=%rdx,<squarer31=%r13
adc %rdx,%r13
# qhasm: squarerax = t21_stack
# asm 1: movq <t21_stack=stack64#14,>squarerax=int64#7
# asm 2: movq <t21_stack=104(%rsp),>squarerax=%rax
movq 104(%rsp),%rax
# qhasm: squarerax <<= 1
# asm 1: shl $1,<squarerax=int64#7
# asm 2: shl $1,<squarerax=%rax
shl $1,%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t23_stack
# asm 1: mulq <t23_stack=stack64#16
# asm 2: mulq <t23_stack=120(%rsp)
mulq 120(%rsp)
# qhasm: carry? t74 += squarerax
# asm 1: add <squarerax=int64#7,<t74=int64#12
# asm 2: add <squarerax=%rax,<t74=%r14
add %rax,%r14
# qhasm: squarer41 += squarerdx + carry
# asm 1: adc <squarerdx=int64#3,<squarer41=int64#13
# asm 2: adc <squarerdx=%rdx,<squarer41=%r15
adc %rdx,%r15
# qhasm: squarerax = t21_stack
# asm 1: movq <t21_stack=stack64#14,>squarerax=int64#3
# asm 2: movq <t21_stack=104(%rsp),>squarerax=%rdx
movq 104(%rsp),%rdx
# qhasm: squarerax *= 38
# asm 1: imulq $38,<squarerax=int64#3,>squarerax=int64#7
# asm 2: imulq $38,<squarerax=%rdx,>squarerax=%rax
imulq $38,%rdx,%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t24_stack
# asm 1: mulq <t24_stack=stack64#17
# asm 2: mulq <t24_stack=128(%rsp)
mulq 128(%rsp)
# qhasm: carry? t70 += squarerax
# asm 1: add <squarerax=int64#7,<t70=int64#2
# asm 2: add <squarerax=%rax,<t70=%rsi
add %rax,%rsi
# qhasm: squarer01 += squarerdx + carry
# asm 1: adc <squarerdx=int64#3,<squarer01=int64#4
# asm 2: adc <squarerdx=%rdx,<squarer01=%rcx
adc %rdx,%rcx
# qhasm: squarerax = t22_stack
# asm 1: movq <t22_stack=stack64#15,>squarerax=int64#7
# asm 2: movq <t22_stack=112(%rsp),>squarerax=%rax
movq 112(%rsp),%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t22_stack
# asm 1: mulq <t22_stack=stack64#15
# asm 2: mulq <t22_stack=112(%rsp)
mulq 112(%rsp)
# qhasm: carry? t74 += squarerax
# asm 1: add <squarerax=int64#7,<t74=int64#12
# asm 2: add <squarerax=%rax,<t74=%r14
add %rax,%r14
# qhasm: squarer41 += squarerdx + carry
# asm 1: adc <squarerdx=int64#3,<squarer41=int64#13
# asm 2: adc <squarerdx=%rdx,<squarer41=%r15
adc %rdx,%r15
# qhasm: squarerax = t22_stack
# asm 1: movq <t22_stack=stack64#15,>squarerax=int64#3
# asm 2: movq <t22_stack=112(%rsp),>squarerax=%rdx
movq 112(%rsp),%rdx
# qhasm: squarerax *= 38
# asm 1: imulq $38,<squarerax=int64#3,>squarerax=int64#7
# asm 2: imulq $38,<squarerax=%rdx,>squarerax=%rax
imulq $38,%rdx,%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t23_stack
# asm 1: mulq <t23_stack=stack64#16
# asm 2: mulq <t23_stack=120(%rsp)
mulq 120(%rsp)
# qhasm: carry? t70 += squarerax
# asm 1: add <squarerax=int64#7,<t70=int64#2
# asm 2: add <squarerax=%rax,<t70=%rsi
add %rax,%rsi
# qhasm: squarer01 += squarerdx + carry
# asm 1: adc <squarerdx=int64#3,<squarer01=int64#4
# asm 2: adc <squarerdx=%rdx,<squarer01=%rcx
adc %rdx,%rcx
# qhasm: squarerax = t22_stack
# asm 1: movq <t22_stack=stack64#15,>squarerax=int64#3
# asm 2: movq <t22_stack=112(%rsp),>squarerax=%rdx
movq 112(%rsp),%rdx
# qhasm: squarerax *= 38
# asm 1: imulq $38,<squarerax=int64#3,>squarerax=int64#7
# asm 2: imulq $38,<squarerax=%rdx,>squarerax=%rax
imulq $38,%rdx,%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t24_stack
# asm 1: mulq <t24_stack=stack64#17
# asm 2: mulq <t24_stack=128(%rsp)
mulq 128(%rsp)
# qhasm: carry? t71 += squarerax
# asm 1: add <squarerax=int64#7,<t71=int64#5
# asm 2: add <squarerax=%rax,<t71=%r8
add %rax,%r8
# qhasm: squarer11 += squarerdx + carry
# asm 1: adc <squarerdx=int64#3,<squarer11=int64#6
# asm 2: adc <squarerdx=%rdx,<squarer11=%r9
adc %rdx,%r9
# qhasm: squarerax = t23_stack
# asm 1: movq <t23_stack=stack64#16,>squarerax=int64#3
# asm 2: movq <t23_stack=120(%rsp),>squarerax=%rdx
movq 120(%rsp),%rdx
# qhasm: squarerax *= 19
# asm 1: imulq $19,<squarerax=int64#3,>squarerax=int64#7
# asm 2: imulq $19,<squarerax=%rdx,>squarerax=%rax
imulq $19,%rdx,%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t23_stack
# asm 1: mulq <t23_stack=stack64#16
# asm 2: mulq <t23_stack=120(%rsp)
mulq 120(%rsp)
# qhasm: carry? t71 += squarerax
# asm 1: add <squarerax=int64#7,<t71=int64#5
# asm 2: add <squarerax=%rax,<t71=%r8
add %rax,%r8
# qhasm: squarer11 += squarerdx + carry
# asm 1: adc <squarerdx=int64#3,<squarer11=int64#6
# asm 2: adc <squarerdx=%rdx,<squarer11=%r9
adc %rdx,%r9
# qhasm: squarerax = t23_stack
# asm 1: movq <t23_stack=stack64#16,>squarerax=int64#3
# asm 2: movq <t23_stack=120(%rsp),>squarerax=%rdx
movq 120(%rsp),%rdx
# qhasm: squarerax *= 38
# asm 1: imulq $38,<squarerax=int64#3,>squarerax=int64#7
# asm 2: imulq $38,<squarerax=%rdx,>squarerax=%rax
imulq $38,%rdx,%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t24_stack
# asm 1: mulq <t24_stack=stack64#17
# asm 2: mulq <t24_stack=128(%rsp)
mulq 128(%rsp)
# qhasm: carry? t72 += squarerax
# asm 1: add <squarerax=int64#7,<t72=int64#8
# asm 2: add <squarerax=%rax,<t72=%r10
add %rax,%r10
# qhasm: squarer21 += squarerdx + carry
# asm 1: adc <squarerdx=int64#3,<squarer21=int64#9
# asm 2: adc <squarerdx=%rdx,<squarer21=%r11
adc %rdx,%r11
# qhasm: squarerax = t24_stack
# asm 1: movq <t24_stack=stack64#17,>squarerax=int64#3
# asm 2: movq <t24_stack=128(%rsp),>squarerax=%rdx
movq 128(%rsp),%rdx
# qhasm: squarerax *= 19
# asm 1: imulq $19,<squarerax=int64#3,>squarerax=int64#7
# asm 2: imulq $19,<squarerax=%rdx,>squarerax=%rax
imulq $19,%rdx,%rax
# qhasm: (uint128) squarerdx squarerax = squarerax * t24_stack
# asm 1: mulq <t24_stack=stack64#17
# asm 2: mulq <t24_stack=128(%rsp)
mulq 128(%rsp)
# qhasm: carry? t73 += squarerax
# asm 1: add <squarerax=int64#7,<t73=int64#10
# asm 2: add <squarerax=%rax,<t73=%r12
add %rax,%r12
# qhasm: squarer31 += squarerdx + carry
# asm 1: adc <squarerdx=int64#3,<squarer31=int64#11
# asm 2: adc <squarerdx=%rdx,<squarer31=%r13
adc %rdx,%r13
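# The mulq/imulq chain above computes the square t7 = t2^2 before
# reduction.  With t2 = a0 + 2^51*a1 + 2^102*a2 + 2^153*a3 + 2^204*a4,
# each column is accumulated in a 128-bit pair (t7i low, squareri1 high);
# cross terms are doubled (shl $1), and products whose weight reaches
# 2^255 are folded back using 2^255 = 19 (mod p), which is why some
# factors are pre-multiplied by 19 or by 38 = 2*19.  Column by column:
#
#     r0 = a0*a0             + 38*a1*a4 + 38*a2*a3
#     r1 = 2*a0*a1           + 38*a2*a4 + 19*a3*a3
#     r2 = 2*a0*a2 + a1*a1   + 38*a3*a4
#     r3 = 2*a0*a3 + 2*a1*a2 + 19*a4*a4
#     r4 = 2*a0*a4 + 2*a1*a3 + a2*a2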
  624. # qhasm: squareredmask = *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_REDMASK51
  625. # asm 1: movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,>squareredmask=int64#3
  626. # asm 2: movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,>squareredmask=%rdx
  627. movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,%rdx
  628. # qhasm: squarer01 = (squarer01.t70) << 13
  629. # asm 1: shld $13,<t70=int64#2,<squarer01=int64#4
  630. # asm 2: shld $13,<t70=%rsi,<squarer01=%rcx
  631. shld $13,%rsi,%rcx
  632. # qhasm: t70 &= squareredmask
  633. # asm 1: and <squareredmask=int64#3,<t70=int64#2
  634. # asm 2: and <squareredmask=%rdx,<t70=%rsi
  635. and %rdx,%rsi
  636. # qhasm: squarer11 = (squarer11.t71) << 13
  637. # asm 1: shld $13,<t71=int64#5,<squarer11=int64#6
  638. # asm 2: shld $13,<t71=%r8,<squarer11=%r9
  639. shld $13,%r8,%r9
  640. # qhasm: t71 &= squareredmask
  641. # asm 1: and <squareredmask=int64#3,<t71=int64#5
  642. # asm 2: and <squareredmask=%rdx,<t71=%r8
  643. and %rdx,%r8
  644. # qhasm: t71 += squarer01
  645. # asm 1: add <squarer01=int64#4,<t71=int64#5
  646. # asm 2: add <squarer01=%rcx,<t71=%r8
  647. add %rcx,%r8
  648. # qhasm: squarer21 = (squarer21.t72) << 13
  649. # asm 1: shld $13,<t72=int64#8,<squarer21=int64#9
  650. # asm 2: shld $13,<t72=%r10,<squarer21=%r11
  651. shld $13,%r10,%r11
  652. # qhasm: t72 &= squareredmask
  653. # asm 1: and <squareredmask=int64#3,<t72=int64#8
  654. # asm 2: and <squareredmask=%rdx,<t72=%r10
  655. and %rdx,%r10
  656. # qhasm: t72 += squarer11
  657. # asm 1: add <squarer11=int64#6,<t72=int64#8
  658. # asm 2: add <squarer11=%r9,<t72=%r10
  659. add %r9,%r10
  660. # qhasm: squarer31 = (squarer31.t73) << 13
  661. # asm 1: shld $13,<t73=int64#10,<squarer31=int64#11
  662. # asm 2: shld $13,<t73=%r12,<squarer31=%r13
  663. shld $13,%r12,%r13
  664. # qhasm: t73 &= squareredmask
  665. # asm 1: and <squareredmask=int64#3,<t73=int64#10
  666. # asm 2: and <squareredmask=%rdx,<t73=%r12
  667. and %rdx,%r12
  668. # qhasm: t73 += squarer21
  669. # asm 1: add <squarer21=int64#9,<t73=int64#10
  670. # asm 2: add <squarer21=%r11,<t73=%r12
  671. add %r11,%r12
  672. # qhasm: squarer41 = (squarer41.t74) << 13
  673. # asm 1: shld $13,<t74=int64#12,<squarer41=int64#13
  674. # asm 2: shld $13,<t74=%r14,<squarer41=%r15
  675. shld $13,%r14,%r15
  676. # qhasm: t74 &= squareredmask
  677. # asm 1: and <squareredmask=int64#3,<t74=int64#12
  678. # asm 2: and <squareredmask=%rdx,<t74=%r14
  679. and %rdx,%r14
  680. # qhasm: t74 += squarer31
  681. # asm 1: add <squarer31=int64#11,<t74=int64#12
  682. # asm 2: add <squarer31=%r13,<t74=%r14
  683. add %r13,%r14
  684. # qhasm: squarer41 = squarer41 * 19
  685. # asm 1: imulq $19,<squarer41=int64#13,>squarer41=int64#4
  686. # asm 2: imulq $19,<squarer41=%r15,>squarer41=%rcx
  687. imulq $19,%r15,%rcx
  688. # qhasm: t70 += squarer41
  689. # asm 1: add <squarer41=int64#4,<t70=int64#2
  690. # asm 2: add <squarer41=%rcx,<t70=%rsi
  691. add %rcx,%rsi
  692. # qhasm: squaret = t70
  693. # asm 1: mov <t70=int64#2,>squaret=int64#4
  694. # asm 2: mov <t70=%rsi,>squaret=%rcx
  695. mov %rsi,%rcx
  696. # qhasm: (uint64) squaret >>= 51
  697. # asm 1: shr $51,<squaret=int64#4
  698. # asm 2: shr $51,<squaret=%rcx
  699. shr $51,%rcx
  700. # qhasm: squaret += t71
  701. # asm 1: add <t71=int64#5,<squaret=int64#4
  702. # asm 2: add <t71=%r8,<squaret=%rcx
  703. add %r8,%rcx
  704. # qhasm: t70 &= squareredmask
  705. # asm 1: and <squareredmask=int64#3,<t70=int64#2
  706. # asm 2: and <squareredmask=%rdx,<t70=%rsi
  707. and %rdx,%rsi
  708. # qhasm: t71 = squaret
  709. # asm 1: mov <squaret=int64#4,>t71=int64#5
  710. # asm 2: mov <squaret=%rcx,>t71=%r8
  711. mov %rcx,%r8
  712. # qhasm: (uint64) squaret >>= 51
  713. # asm 1: shr $51,<squaret=int64#4
  714. # asm 2: shr $51,<squaret=%rcx
  715. shr $51,%rcx
  716. # qhasm: squaret += t72
  717. # asm 1: add <t72=int64#8,<squaret=int64#4
  718. # asm 2: add <t72=%r10,<squaret=%rcx
  719. add %r10,%rcx
  720. # qhasm: t71 &= squareredmask
  721. # asm 1: and <squareredmask=int64#3,<t71=int64#5
  722. # asm 2: and <squareredmask=%rdx,<t71=%r8
  723. and %rdx,%r8
  724. # qhasm: t72 = squaret
  725. # asm 1: mov <squaret=int64#4,>t72=int64#6
  726. # asm 2: mov <squaret=%rcx,>t72=%r9
  727. mov %rcx,%r9
  728. # qhasm: (uint64) squaret >>= 51
  729. # asm 1: shr $51,<squaret=int64#4
  730. # asm 2: shr $51,<squaret=%rcx
  731. shr $51,%rcx
  732. # qhasm: squaret += t73
  733. # asm 1: add <t73=int64#10,<squaret=int64#4
  734. # asm 2: add <t73=%r12,<squaret=%rcx
  735. add %r12,%rcx
  736. # qhasm: t72 &= squareredmask
  737. # asm 1: and <squareredmask=int64#3,<t72=int64#6
  738. # asm 2: and <squareredmask=%rdx,<t72=%r9
  739. and %rdx,%r9
  740. # qhasm: t73 = squaret
  741. # asm 1: mov <squaret=int64#4,>t73=int64#7
  742. # asm 2: mov <squaret=%rcx,>t73=%rax
  743. mov %rcx,%rax
  744. # qhasm: (uint64) squaret >>= 51
  745. # asm 1: shr $51,<squaret=int64#4
  746. # asm 2: shr $51,<squaret=%rcx
  747. shr $51,%rcx
  748. # qhasm: squaret += t74
  749. # asm 1: add <t74=int64#12,<squaret=int64#4
  750. # asm 2: add <t74=%r14,<squaret=%rcx
  751. add %r14,%rcx
  752. # qhasm: t73 &= squareredmask
  753. # asm 1: and <squareredmask=int64#3,<t73=int64#7
  754. # asm 2: and <squareredmask=%rdx,<t73=%rax
  755. and %rdx,%rax
  756. # qhasm: t74 = squaret
  757. # asm 1: mov <squaret=int64#4,>t74=int64#8
  758. # asm 2: mov <squaret=%rcx,>t74=%r10
  759. mov %rcx,%r10
  760. # qhasm: (uint64) squaret >>= 51
  761. # asm 1: shr $51,<squaret=int64#4
  762. # asm 2: shr $51,<squaret=%rcx
  763. shr $51,%rcx
  764. # qhasm: squaret *= 19
  765. # asm 1: imulq $19,<squaret=int64#4,>squaret=int64#4
  766. # asm 2: imulq $19,<squaret=%rcx,>squaret=%rcx
  767. imulq $19,%rcx,%rcx
  768. # qhasm: t70 += squaret
  769. # asm 1: add <squaret=int64#4,<t70=int64#2
  770. # asm 2: add <squaret=%rcx,<t70=%rsi
  771. add %rcx,%rsi
  772. # qhasm: t74 &= squareredmask
  773. # asm 1: and <squareredmask=int64#3,<t74=int64#8
  774. # asm 2: and <squareredmask=%rdx,<t74=%r10
  775. and %rdx,%r10
  776. # qhasm: t70_stack = t70
  777. # asm 1: movq <t70=int64#2,>t70_stack=stack64#18
  778. # asm 2: movq <t70=%rsi,>t70_stack=136(%rsp)
  779. movq %rsi,136(%rsp)
  780. # qhasm: t71_stack = t71
  781. # asm 1: movq <t71=int64#5,>t71_stack=stack64#19
  782. # asm 2: movq <t71=%r8,>t71_stack=144(%rsp)
  783. movq %r8,144(%rsp)
  784. # qhasm: t72_stack = t72
  785. # asm 1: movq <t72=int64#6,>t72_stack=stack64#20
  786. # asm 2: movq <t72=%r9,>t72_stack=152(%rsp)
  787. movq %r9,152(%rsp)
  788. # qhasm: t73_stack = t73
  789. # asm 1: movq <t73=int64#7,>t73_stack=stack64#21
  790. # asm 2: movq <t73=%rax,>t73_stack=160(%rsp)
  791. movq %rax,160(%rsp)
  792. # qhasm: t74_stack = t74
  793. # asm 1: movq <t74=int64#8,>t74_stack=stack64#22
  794. # asm 2: movq <t74=%r10,>t74_stack=168(%rsp)
  795. movq %r10,168(%rsp)
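# The lines above complete the reduction of the square held in t70..t74 to
# five 51-bit limbs and spill them to 136(%rsp)..168(%rsp) as
# t70_stack..t74_stack. The shr $51 / and REDMASK51 pattern indicates that
# REDMASK51 is 2^51 - 1; under that reading the carry chain just performed
# is, limb by limb:
#   c = t70 >> 51;             t70 &= REDMASK51
#   t71 += c;  c = t71 >> 51;  t71 &= REDMASK51
#   t72 += c;  c = t72 >> 51;  t72 &= REDMASK51
#   t73 += c;  c = t73 >> 51;  t73 &= REDMASK51
#   t74 += c;  c = t74 >> 51;  t74 &= REDMASK51
#   t70 += 19*c
# The final carry is multiplied by 19 because the limbs represent a value
# modulo the Curve25519 prime 2^255 - 19, so 2^255 = 19 in the field.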
  796. # qhasm: squarerax = t10_stack
  797. # asm 1: movq <t10_stack=stack64#8,>squarerax=int64#7
  798. # asm 2: movq <t10_stack=56(%rsp),>squarerax=%rax
  799. movq 56(%rsp),%rax
  800. # qhasm: (uint128) squarerdx squarerax = squarerax * t10_stack
  801. # asm 1: mulq <t10_stack=stack64#8
  802. # asm 2: mulq <t10_stack=56(%rsp)
  803. mulq 56(%rsp)
  804. # qhasm: t60 = squarerax
  805. # asm 1: mov <squarerax=int64#7,>t60=int64#2
  806. # asm 2: mov <squarerax=%rax,>t60=%rsi
  807. mov %rax,%rsi
  808. # qhasm: squarer01 = squarerdx
  809. # asm 1: mov <squarerdx=int64#3,>squarer01=int64#4
  810. # asm 2: mov <squarerdx=%rdx,>squarer01=%rcx
  811. mov %rdx,%rcx
  812. # qhasm: squarerax = t10_stack
  813. # asm 1: movq <t10_stack=stack64#8,>squarerax=int64#7
  814. # asm 2: movq <t10_stack=56(%rsp),>squarerax=%rax
  815. movq 56(%rsp),%rax
  816. # qhasm: squarerax <<= 1
  817. # asm 1: shl $1,<squarerax=int64#7
  818. # asm 2: shl $1,<squarerax=%rax
  819. shl $1,%rax
  820. # qhasm: (uint128) squarerdx squarerax = squarerax * t11_stack
  821. # asm 1: mulq <t11_stack=stack64#9
  822. # asm 2: mulq <t11_stack=64(%rsp)
  823. mulq 64(%rsp)
  824. # qhasm: t61 = squarerax
  825. # asm 1: mov <squarerax=int64#7,>t61=int64#5
  826. # asm 2: mov <squarerax=%rax,>t61=%r8
  827. mov %rax,%r8
  828. # qhasm: squarer11 = squarerdx
  829. # asm 1: mov <squarerdx=int64#3,>squarer11=int64#6
  830. # asm 2: mov <squarerdx=%rdx,>squarer11=%r9
  831. mov %rdx,%r9
  832. # qhasm: squarerax = t10_stack
  833. # asm 1: movq <t10_stack=stack64#8,>squarerax=int64#7
  834. # asm 2: movq <t10_stack=56(%rsp),>squarerax=%rax
  835. movq 56(%rsp),%rax
  836. # qhasm: squarerax <<= 1
  837. # asm 1: shl $1,<squarerax=int64#7
  838. # asm 2: shl $1,<squarerax=%rax
  839. shl $1,%rax
  840. # qhasm: (uint128) squarerdx squarerax = squarerax * t12_stack
  841. # asm 1: mulq <t12_stack=stack64#10
  842. # asm 2: mulq <t12_stack=72(%rsp)
  843. mulq 72(%rsp)
  844. # qhasm: t62 = squarerax
  845. # asm 1: mov <squarerax=int64#7,>t62=int64#8
  846. # asm 2: mov <squarerax=%rax,>t62=%r10
  847. mov %rax,%r10
  848. # qhasm: squarer21 = squarerdx
  849. # asm 1: mov <squarerdx=int64#3,>squarer21=int64#9
  850. # asm 2: mov <squarerdx=%rdx,>squarer21=%r11
  851. mov %rdx,%r11
  852. # qhasm: squarerax = t10_stack
  853. # asm 1: movq <t10_stack=stack64#8,>squarerax=int64#7
  854. # asm 2: movq <t10_stack=56(%rsp),>squarerax=%rax
  855. movq 56(%rsp),%rax
  856. # qhasm: squarerax <<= 1
  857. # asm 1: shl $1,<squarerax=int64#7
  858. # asm 2: shl $1,<squarerax=%rax
  859. shl $1,%rax
  860. # qhasm: (uint128) squarerdx squarerax = squarerax * t13_stack
  861. # asm 1: mulq <t13_stack=stack64#11
  862. # asm 2: mulq <t13_stack=80(%rsp)
  863. mulq 80(%rsp)
  864. # qhasm: t63 = squarerax
  865. # asm 1: mov <squarerax=int64#7,>t63=int64#10
  866. # asm 2: mov <squarerax=%rax,>t63=%r12
  867. mov %rax,%r12
  868. # qhasm: squarer31 = squarerdx
  869. # asm 1: mov <squarerdx=int64#3,>squarer31=int64#11
  870. # asm 2: mov <squarerdx=%rdx,>squarer31=%r13
  871. mov %rdx,%r13
  872. # qhasm: squarerax = t10_stack
  873. # asm 1: movq <t10_stack=stack64#8,>squarerax=int64#7
  874. # asm 2: movq <t10_stack=56(%rsp),>squarerax=%rax
  875. movq 56(%rsp),%rax
  876. # qhasm: squarerax <<= 1
  877. # asm 1: shl $1,<squarerax=int64#7
  878. # asm 2: shl $1,<squarerax=%rax
  879. shl $1,%rax
  880. # qhasm: (uint128) squarerdx squarerax = squarerax * t14_stack
  881. # asm 1: mulq <t14_stack=stack64#12
  882. # asm 2: mulq <t14_stack=88(%rsp)
  883. mulq 88(%rsp)
  884. # qhasm: t64 = squarerax
  885. # asm 1: mov <squarerax=int64#7,>t64=int64#12
  886. # asm 2: mov <squarerax=%rax,>t64=%r14
  887. mov %rax,%r14
  888. # qhasm: squarer41 = squarerdx
  889. # asm 1: mov <squarerdx=int64#3,>squarer41=int64#13
  890. # asm 2: mov <squarerdx=%rdx,>squarer41=%r15
  891. mov %rdx,%r15
  892. # qhasm: squarerax = t11_stack
  893. # asm 1: movq <t11_stack=stack64#9,>squarerax=int64#7
  894. # asm 2: movq <t11_stack=64(%rsp),>squarerax=%rax
  895. movq 64(%rsp),%rax
  896. # qhasm: (uint128) squarerdx squarerax = squarerax * t11_stack
  897. # asm 1: mulq <t11_stack=stack64#9
  898. # asm 2: mulq <t11_stack=64(%rsp)
  899. mulq 64(%rsp)
  900. # qhasm: carry? t62 += squarerax
  901. # asm 1: add <squarerax=int64#7,<t62=int64#8
  902. # asm 2: add <squarerax=%rax,<t62=%r10
  903. add %rax,%r10
  904. # qhasm: squarer21 += squarerdx + carry
  905. # asm 1: adc <squarerdx=int64#3,<squarer21=int64#9
  906. # asm 2: adc <squarerdx=%rdx,<squarer21=%r11
  907. adc %rdx,%r11
  908. # qhasm: squarerax = t11_stack
  909. # asm 1: movq <t11_stack=stack64#9,>squarerax=int64#7
  910. # asm 2: movq <t11_stack=64(%rsp),>squarerax=%rax
  911. movq 64(%rsp),%rax
  912. # qhasm: squarerax <<= 1
  913. # asm 1: shl $1,<squarerax=int64#7
  914. # asm 2: shl $1,<squarerax=%rax
  915. shl $1,%rax
  916. # qhasm: (uint128) squarerdx squarerax = squarerax * t12_stack
  917. # asm 1: mulq <t12_stack=stack64#10
  918. # asm 2: mulq <t12_stack=72(%rsp)
  919. mulq 72(%rsp)
  920. # qhasm: carry? t63 += squarerax
  921. # asm 1: add <squarerax=int64#7,<t63=int64#10
  922. # asm 2: add <squarerax=%rax,<t63=%r12
  923. add %rax,%r12
  924. # qhasm: squarer31 += squarerdx + carry
  925. # asm 1: adc <squarerdx=int64#3,<squarer31=int64#11
  926. # asm 2: adc <squarerdx=%rdx,<squarer31=%r13
  927. adc %rdx,%r13
  928. # qhasm: squarerax = t11_stack
  929. # asm 1: movq <t11_stack=stack64#9,>squarerax=int64#7
  930. # asm 2: movq <t11_stack=64(%rsp),>squarerax=%rax
  931. movq 64(%rsp),%rax
  932. # qhasm: squarerax <<= 1
  933. # asm 1: shl $1,<squarerax=int64#7
  934. # asm 2: shl $1,<squarerax=%rax
  935. shl $1,%rax
  936. # qhasm: (uint128) squarerdx squarerax = squarerax * t13_stack
  937. # asm 1: mulq <t13_stack=stack64#11
  938. # asm 2: mulq <t13_stack=80(%rsp)
  939. mulq 80(%rsp)
  940. # qhasm: carry? t64 += squarerax
  941. # asm 1: add <squarerax=int64#7,<t64=int64#12
  942. # asm 2: add <squarerax=%rax,<t64=%r14
  943. add %rax,%r14
  944. # qhasm: squarer41 += squarerdx + carry
  945. # asm 1: adc <squarerdx=int64#3,<squarer41=int64#13
  946. # asm 2: adc <squarerdx=%rdx,<squarer41=%r15
  947. adc %rdx,%r15
  948. # qhasm: squarerax = t11_stack
  949. # asm 1: movq <t11_stack=stack64#9,>squarerax=int64#3
  950. # asm 2: movq <t11_stack=64(%rsp),>squarerax=%rdx
  951. movq 64(%rsp),%rdx
  952. # qhasm: squarerax *= 38
  953. # asm 1: imulq $38,<squarerax=int64#3,>squarerax=int64#7
  954. # asm 2: imulq $38,<squarerax=%rdx,>squarerax=%rax
  955. imulq $38,%rdx,%rax
  956. # qhasm: (uint128) squarerdx squarerax = squarerax * t14_stack
  957. # asm 1: mulq <t14_stack=stack64#12
  958. # asm 2: mulq <t14_stack=88(%rsp)
  959. mulq 88(%rsp)
  960. # qhasm: carry? t60 += squarerax
  961. # asm 1: add <squarerax=int64#7,<t60=int64#2
  962. # asm 2: add <squarerax=%rax,<t60=%rsi
  963. add %rax,%rsi
  964. # qhasm: squarer01 += squarerdx + carry
  965. # asm 1: adc <squarerdx=int64#3,<squarer01=int64#4
  966. # asm 2: adc <squarerdx=%rdx,<squarer01=%rcx
  967. adc %rdx,%rcx
  968. # qhasm: squarerax = t12_stack
  969. # asm 1: movq <t12_stack=stack64#10,>squarerax=int64#7
  970. # asm 2: movq <t12_stack=72(%rsp),>squarerax=%rax
  971. movq 72(%rsp),%rax
  972. # qhasm: (uint128) squarerdx squarerax = squarerax * t12_stack
  973. # asm 1: mulq <t12_stack=stack64#10
  974. # asm 2: mulq <t12_stack=72(%rsp)
  975. mulq 72(%rsp)
  976. # qhasm: carry? t64 += squarerax
  977. # asm 1: add <squarerax=int64#7,<t64=int64#12
  978. # asm 2: add <squarerax=%rax,<t64=%r14
  979. add %rax,%r14
  980. # qhasm: squarer41 += squarerdx + carry
  981. # asm 1: adc <squarerdx=int64#3,<squarer41=int64#13
  982. # asm 2: adc <squarerdx=%rdx,<squarer41=%r15
  983. adc %rdx,%r15
  984. # qhasm: squarerax = t12_stack
  985. # asm 1: movq <t12_stack=stack64#10,>squarerax=int64#3
  986. # asm 2: movq <t12_stack=72(%rsp),>squarerax=%rdx
  987. movq 72(%rsp),%rdx
  988. # qhasm: squarerax *= 38
  989. # asm 1: imulq $38,<squarerax=int64#3,>squarerax=int64#7
  990. # asm 2: imulq $38,<squarerax=%rdx,>squarerax=%rax
  991. imulq $38,%rdx,%rax
  992. # qhasm: (uint128) squarerdx squarerax = squarerax * t13_stack
  993. # asm 1: mulq <t13_stack=stack64#11
  994. # asm 2: mulq <t13_stack=80(%rsp)
  995. mulq 80(%rsp)
  996. # qhasm: carry? t60 += squarerax
  997. # asm 1: add <squarerax=int64#7,<t60=int64#2
  998. # asm 2: add <squarerax=%rax,<t60=%rsi
  999. add %rax,%rsi
  1000. # qhasm: squarer01 += squarerdx + carry
  1001. # asm 1: adc <squarerdx=int64#3,<squarer01=int64#4
  1002. # asm 2: adc <squarerdx=%rdx,<squarer01=%rcx
  1003. adc %rdx,%rcx
  1004. # qhasm: squarerax = t12_stack
  1005. # asm 1: movq <t12_stack=stack64#10,>squarerax=int64#3
  1006. # asm 2: movq <t12_stack=72(%rsp),>squarerax=%rdx
  1007. movq 72(%rsp),%rdx
  1008. # qhasm: squarerax *= 38
  1009. # asm 1: imulq $38,<squarerax=int64#3,>squarerax=int64#7
  1010. # asm 2: imulq $38,<squarerax=%rdx,>squarerax=%rax
  1011. imulq $38,%rdx,%rax
  1012. # qhasm: (uint128) squarerdx squarerax = squarerax * t14_stack
  1013. # asm 1: mulq <t14_stack=stack64#12
  1014. # asm 2: mulq <t14_stack=88(%rsp)
  1015. mulq 88(%rsp)
  1016. # qhasm: carry? t61 += squarerax
  1017. # asm 1: add <squarerax=int64#7,<t61=int64#5
  1018. # asm 2: add <squarerax=%rax,<t61=%r8
  1019. add %rax,%r8
  1020. # qhasm: squarer11 += squarerdx + carry
  1021. # asm 1: adc <squarerdx=int64#3,<squarer11=int64#6
  1022. # asm 2: adc <squarerdx=%rdx,<squarer11=%r9
  1023. adc %rdx,%r9
  1024. # qhasm: squarerax = t13_stack
  1025. # asm 1: movq <t13_stack=stack64#11,>squarerax=int64#3
  1026. # asm 2: movq <t13_stack=80(%rsp),>squarerax=%rdx
  1027. movq 80(%rsp),%rdx
  1028. # qhasm: squarerax *= 19
  1029. # asm 1: imulq $19,<squarerax=int64#3,>squarerax=int64#7
  1030. # asm 2: imulq $19,<squarerax=%rdx,>squarerax=%rax
  1031. imulq $19,%rdx,%rax
  1032. # qhasm: (uint128) squarerdx squarerax = squarerax * t13_stack
  1033. # asm 1: mulq <t13_stack=stack64#11
  1034. # asm 2: mulq <t13_stack=80(%rsp)
  1035. mulq 80(%rsp)
  1036. # qhasm: carry? t61 += squarerax
  1037. # asm 1: add <squarerax=int64#7,<t61=int64#5
  1038. # asm 2: add <squarerax=%rax,<t61=%r8
  1039. add %rax,%r8
  1040. # qhasm: squarer11 += squarerdx + carry
  1041. # asm 1: adc <squarerdx=int64#3,<squarer11=int64#6
  1042. # asm 2: adc <squarerdx=%rdx,<squarer11=%r9
  1043. adc %rdx,%r9
  1044. # qhasm: squarerax = t13_stack
  1045. # asm 1: movq <t13_stack=stack64#11,>squarerax=int64#3
  1046. # asm 2: movq <t13_stack=80(%rsp),>squarerax=%rdx
  1047. movq 80(%rsp),%rdx
  1048. # qhasm: squarerax *= 38
  1049. # asm 1: imulq $38,<squarerax=int64#3,>squarerax=int64#7
  1050. # asm 2: imulq $38,<squarerax=%rdx,>squarerax=%rax
  1051. imulq $38,%rdx,%rax
  1052. # qhasm: (uint128) squarerdx squarerax = squarerax * t14_stack
  1053. # asm 1: mulq <t14_stack=stack64#12
  1054. # asm 2: mulq <t14_stack=88(%rsp)
  1055. mulq 88(%rsp)
  1056. # qhasm: carry? t62 += squarerax
  1057. # asm 1: add <squarerax=int64#7,<t62=int64#8
  1058. # asm 2: add <squarerax=%rax,<t62=%r10
  1059. add %rax,%r10
  1060. # qhasm: squarer21 += squarerdx + carry
  1061. # asm 1: adc <squarerdx=int64#3,<squarer21=int64#9
  1062. # asm 2: adc <squarerdx=%rdx,<squarer21=%r11
  1063. adc %rdx,%r11
  1064. # qhasm: squarerax = t14_stack
  1065. # asm 1: movq <t14_stack=stack64#12,>squarerax=int64#3
  1066. # asm 2: movq <t14_stack=88(%rsp),>squarerax=%rdx
  1067. movq 88(%rsp),%rdx
  1068. # qhasm: squarerax *= 19
  1069. # asm 1: imulq $19,<squarerax=int64#3,>squarerax=int64#7
  1070. # asm 2: imulq $19,<squarerax=%rdx,>squarerax=%rax
  1071. imulq $19,%rdx,%rax
  1072. # qhasm: (uint128) squarerdx squarerax = squarerax * t14_stack
  1073. # asm 1: mulq <t14_stack=stack64#12
  1074. # asm 2: mulq <t14_stack=88(%rsp)
  1075. mulq 88(%rsp)
  1076. # qhasm: carry? t63 += squarerax
  1077. # asm 1: add <squarerax=int64#7,<t63=int64#10
  1078. # asm 2: add <squarerax=%rax,<t63=%r12
  1079. add %rax,%r12
  1080. # qhasm: squarer31 += squarerdx + carry
  1081. # asm 1: adc <squarerdx=int64#3,<squarer31=int64#11
  1082. # asm 2: adc <squarerdx=%rdx,<squarer31=%r13
  1083. adc %rdx,%r13
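# At this point the register pairs (t60,squarer01) .. (t64,squarer41) hold
# the five 128-bit column sums of the square of t10..t14 (loaded from
# 56(%rsp)..88(%rsp)): cross products between distinct limbs are doubled
# (either via the shl $1 above or via the *38 = 2*19 immediates), square
# terms are not, and every product whose limb index reaches 5 or more is
# folded back with a factor of 19 (38 when it is also a doubled cross term),
# again using 2^255 = 19 modulo 2^255 - 19. The code below reduces these
# columns to 51-bit limbs with the same shld $13 / REDMASK51 folding used
# for t70..t74 above.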
  1084. # qhasm: squareredmask = *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_REDMASK51
  1085. # asm 1: movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,>squareredmask=int64#3
  1086. # asm 2: movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,>squareredmask=%rdx
  1087. movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,%rdx
  1088. # qhasm: squarer01 = (squarer01.t60) << 13
  1089. # asm 1: shld $13,<t60=int64#2,<squarer01=int64#4
  1090. # asm 2: shld $13,<t60=%rsi,<squarer01=%rcx
  1091. shld $13,%rsi,%rcx
  1092. # qhasm: t60 &= squareredmask
  1093. # asm 1: and <squareredmask=int64#3,<t60=int64#2
  1094. # asm 2: and <squareredmask=%rdx,<t60=%rsi
  1095. and %rdx,%rsi
  1096. # qhasm: squarer11 = (squarer11.t61) << 13
  1097. # asm 1: shld $13,<t61=int64#5,<squarer11=int64#6
  1098. # asm 2: shld $13,<t61=%r8,<squarer11=%r9
  1099. shld $13,%r8,%r9
  1100. # qhasm: t61 &= squareredmask
  1101. # asm 1: and <squareredmask=int64#3,<t61=int64#5
  1102. # asm 2: and <squareredmask=%rdx,<t61=%r8
  1103. and %rdx,%r8
  1104. # qhasm: t61 += squarer01
  1105. # asm 1: add <squarer01=int64#4,<t61=int64#5
  1106. # asm 2: add <squarer01=%rcx,<t61=%r8
  1107. add %rcx,%r8
  1108. # qhasm: squarer21 = (squarer21.t62) << 13
  1109. # asm 1: shld $13,<t62=int64#8,<squarer21=int64#9
  1110. # asm 2: shld $13,<t62=%r10,<squarer21=%r11
  1111. shld $13,%r10,%r11
  1112. # qhasm: t62 &= squareredmask
  1113. # asm 1: and <squareredmask=int64#3,<t62=int64#8
  1114. # asm 2: and <squareredmask=%rdx,<t62=%r10
  1115. and %rdx,%r10
  1116. # qhasm: t62 += squarer11
  1117. # asm 1: add <squarer11=int64#6,<t62=int64#8
  1118. # asm 2: add <squarer11=%r9,<t62=%r10
  1119. add %r9,%r10
  1120. # qhasm: squarer31 = (squarer31.t63) << 13
  1121. # asm 1: shld $13,<t63=int64#10,<squarer31=int64#11
  1122. # asm 2: shld $13,<t63=%r12,<squarer31=%r13
  1123. shld $13,%r12,%r13
  1124. # qhasm: t63 &= squareredmask
  1125. # asm 1: and <squareredmask=int64#3,<t63=int64#10
  1126. # asm 2: and <squareredmask=%rdx,<t63=%r12
  1127. and %rdx,%r12
  1128. # qhasm: t63 += squarer21
  1129. # asm 1: add <squarer21=int64#9,<t63=int64#10
  1130. # asm 2: add <squarer21=%r11,<t63=%r12
  1131. add %r11,%r12
  1132. # qhasm: squarer41 = (squarer41.t64) << 13
  1133. # asm 1: shld $13,<t64=int64#12,<squarer41=int64#13
  1134. # asm 2: shld $13,<t64=%r14,<squarer41=%r15
  1135. shld $13,%r14,%r15
  1136. # qhasm: t64 &= squareredmask
  1137. # asm 1: and <squareredmask=int64#3,<t64=int64#12
  1138. # asm 2: and <squareredmask=%rdx,<t64=%r14
  1139. and %rdx,%r14
  1140. # qhasm: t64 += squarer31
  1141. # asm 1: add <squarer31=int64#11,<t64=int64#12
  1142. # asm 2: add <squarer31=%r13,<t64=%r14
  1143. add %r13,%r14
  1144. # qhasm: squarer41 = squarer41 * 19
  1145. # asm 1: imulq $19,<squarer41=int64#13,>squarer41=int64#4
  1146. # asm 2: imulq $19,<squarer41=%r15,>squarer41=%rcx
  1147. imulq $19,%r15,%rcx
  1148. # qhasm: t60 += squarer41
  1149. # asm 1: add <squarer41=int64#4,<t60=int64#2
  1150. # asm 2: add <squarer41=%rcx,<t60=%rsi
  1151. add %rcx,%rsi
  1152. # qhasm: squaret = t60
  1153. # asm 1: mov <t60=int64#2,>squaret=int64#4
  1154. # asm 2: mov <t60=%rsi,>squaret=%rcx
  1155. mov %rsi,%rcx
  1156. # qhasm: (uint64) squaret >>= 51
  1157. # asm 1: shr $51,<squaret=int64#4
  1158. # asm 2: shr $51,<squaret=%rcx
  1159. shr $51,%rcx
  1160. # qhasm: squaret += t61
  1161. # asm 1: add <t61=int64#5,<squaret=int64#4
  1162. # asm 2: add <t61=%r8,<squaret=%rcx
  1163. add %r8,%rcx
  1164. # qhasm: t60 &= squareredmask
  1165. # asm 1: and <squareredmask=int64#3,<t60=int64#2
  1166. # asm 2: and <squareredmask=%rdx,<t60=%rsi
  1167. and %rdx,%rsi
  1168. # qhasm: t61 = squaret
  1169. # asm 1: mov <squaret=int64#4,>t61=int64#5
  1170. # asm 2: mov <squaret=%rcx,>t61=%r8
  1171. mov %rcx,%r8
  1172. # qhasm: (uint64) squaret >>= 51
  1173. # asm 1: shr $51,<squaret=int64#4
  1174. # asm 2: shr $51,<squaret=%rcx
  1175. shr $51,%rcx
  1176. # qhasm: squaret += t62
  1177. # asm 1: add <t62=int64#8,<squaret=int64#4
  1178. # asm 2: add <t62=%r10,<squaret=%rcx
  1179. add %r10,%rcx
  1180. # qhasm: t61 &= squareredmask
  1181. # asm 1: and <squareredmask=int64#3,<t61=int64#5
  1182. # asm 2: and <squareredmask=%rdx,<t61=%r8
  1183. and %rdx,%r8
  1184. # qhasm: t62 = squaret
  1185. # asm 1: mov <squaret=int64#4,>t62=int64#6
  1186. # asm 2: mov <squaret=%rcx,>t62=%r9
  1187. mov %rcx,%r9
  1188. # qhasm: (uint64) squaret >>= 51
  1189. # asm 1: shr $51,<squaret=int64#4
  1190. # asm 2: shr $51,<squaret=%rcx
  1191. shr $51,%rcx
  1192. # qhasm: squaret += t63
  1193. # asm 1: add <t63=int64#10,<squaret=int64#4
  1194. # asm 2: add <t63=%r12,<squaret=%rcx
  1195. add %r12,%rcx
  1196. # qhasm: t62 &= squareredmask
  1197. # asm 1: and <squareredmask=int64#3,<t62=int64#6
  1198. # asm 2: and <squareredmask=%rdx,<t62=%r9
  1199. and %rdx,%r9
  1200. # qhasm: t63 = squaret
  1201. # asm 1: mov <squaret=int64#4,>t63=int64#7
  1202. # asm 2: mov <squaret=%rcx,>t63=%rax
  1203. mov %rcx,%rax
  1204. # qhasm: (uint64) squaret >>= 51
  1205. # asm 1: shr $51,<squaret=int64#4
  1206. # asm 2: shr $51,<squaret=%rcx
  1207. shr $51,%rcx
  1208. # qhasm: squaret += t64
  1209. # asm 1: add <t64=int64#12,<squaret=int64#4
  1210. # asm 2: add <t64=%r14,<squaret=%rcx
  1211. add %r14,%rcx
  1212. # qhasm: t63 &= squareredmask
  1213. # asm 1: and <squareredmask=int64#3,<t63=int64#7
  1214. # asm 2: and <squareredmask=%rdx,<t63=%rax
  1215. and %rdx,%rax
  1216. # qhasm: t64 = squaret
  1217. # asm 1: mov <squaret=int64#4,>t64=int64#8
  1218. # asm 2: mov <squaret=%rcx,>t64=%r10
  1219. mov %rcx,%r10
  1220. # qhasm: (uint64) squaret >>= 51
  1221. # asm 1: shr $51,<squaret=int64#4
  1222. # asm 2: shr $51,<squaret=%rcx
  1223. shr $51,%rcx
  1224. # qhasm: squaret *= 19
  1225. # asm 1: imulq $19,<squaret=int64#4,>squaret=int64#4
  1226. # asm 2: imulq $19,<squaret=%rcx,>squaret=%rcx
  1227. imulq $19,%rcx,%rcx
  1228. # qhasm: t60 += squaret
  1229. # asm 1: add <squaret=int64#4,<t60=int64#2
  1230. # asm 2: add <squaret=%rcx,<t60=%rsi
  1231. add %rcx,%rsi
  1232. # qhasm: t64 &= squareredmask
  1233. # asm 1: and <squareredmask=int64#3,<t64=int64#8
  1234. # asm 2: and <squareredmask=%rdx,<t64=%r10
  1235. and %rdx,%r10
  1236. # qhasm: t60_stack = t60
  1237. # asm 1: movq <t60=int64#2,>t60_stack=stack64#23
  1238. # asm 2: movq <t60=%rsi,>t60_stack=176(%rsp)
  1239. movq %rsi,176(%rsp)
  1240. # qhasm: t61_stack = t61
  1241. # asm 1: movq <t61=int64#5,>t61_stack=stack64#24
  1242. # asm 2: movq <t61=%r8,>t61_stack=184(%rsp)
  1243. movq %r8,184(%rsp)
  1244. # qhasm: t62_stack = t62
  1245. # asm 1: movq <t62=int64#6,>t62_stack=stack64#25
  1246. # asm 2: movq <t62=%r9,>t62_stack=192(%rsp)
  1247. movq %r9,192(%rsp)
  1248. # qhasm: t63_stack = t63
  1249. # asm 1: movq <t63=int64#7,>t63_stack=stack64#26
  1250. # asm 2: movq <t63=%rax,>t63_stack=200(%rsp)
  1251. movq %rax,200(%rsp)
  1252. # qhasm: t64_stack = t64
  1253. # asm 1: movq <t64=int64#8,>t64_stack=stack64#27
  1254. # asm 2: movq <t64=%r10,>t64_stack=208(%rsp)
  1255. movq %r10,208(%rsp)
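# The reduced square t60..t64 is saved to 176(%rsp)..208(%rsp) as
# t60_stack..t64_stack. The register copies are reused directly in the next
# block to form t50..t54 (the first copy, t50 = t60, assembles to the no-op
# mov %rsi,%rsi because both names live in %rsi here).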
  1256. # qhasm: t50 = t60
  1257. # asm 1: mov <t60=int64#2,>t50=int64#2
  1258. # asm 2: mov <t60=%rsi,>t50=%rsi
  1259. mov %rsi,%rsi
  1260. # qhasm: t51 = t61
  1261. # asm 1: mov <t61=int64#5,>t51=int64#3
  1262. # asm 2: mov <t61=%r8,>t51=%rdx
  1263. mov %r8,%rdx
  1264. # qhasm: t52 = t62
  1265. # asm 1: mov <t62=int64#6,>t52=int64#4
  1266. # asm 2: mov <t62=%r9,>t52=%rcx
  1267. mov %r9,%rcx
  1268. # qhasm: t53 = t63
  1269. # asm 1: mov <t63=int64#7,>t53=int64#5
  1270. # asm 2: mov <t63=%rax,>t53=%r8
  1271. mov %rax,%r8
  1272. # qhasm: t54 = t64
  1273. # asm 1: mov <t64=int64#8,>t54=int64#6
  1274. # asm 2: mov <t64=%r10,>t54=%r9
  1275. mov %r10,%r9
  1276. # qhasm: t50 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P0
  1277. # asm 1: add crypto_scalarmult_curve25519_amd64_51_2P0,<t50=int64#2
  1278. # asm 2: add crypto_scalarmult_curve25519_amd64_51_2P0,<t50=%rsi
  1279. add crypto_scalarmult_curve25519_amd64_51_2P0,%rsi
  1280. # qhasm: t51 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P1234
  1281. # asm 1: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t51=int64#3
  1282. # asm 2: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t51=%rdx
  1283. add crypto_scalarmult_curve25519_amd64_51_2P1234,%rdx
  1284. # qhasm: t52 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P1234
  1285. # asm 1: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t52=int64#4
  1286. # asm 2: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t52=%rcx
  1287. add crypto_scalarmult_curve25519_amd64_51_2P1234,%rcx
  1288. # qhasm: t53 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P1234
  1289. # asm 1: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t53=int64#5
  1290. # asm 2: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t53=%r8
  1291. add crypto_scalarmult_curve25519_amd64_51_2P1234,%r8
  1292. # qhasm: t54 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P1234
  1293. # asm 1: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t54=int64#6
  1294. # asm 2: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t54=%r9
  1295. add crypto_scalarmult_curve25519_amd64_51_2P1234,%r9
  1296. # qhasm: t50 -= t70_stack
  1297. # asm 1: subq <t70_stack=stack64#18,<t50=int64#2
  1298. # asm 2: subq <t70_stack=136(%rsp),<t50=%rsi
  1299. subq 136(%rsp),%rsi
  1300. # qhasm: t51 -= t71_stack
  1301. # asm 1: subq <t71_stack=stack64#19,<t51=int64#3
  1302. # asm 2: subq <t71_stack=144(%rsp),<t51=%rdx
  1303. subq 144(%rsp),%rdx
  1304. # qhasm: t52 -= t72_stack
  1305. # asm 1: subq <t72_stack=stack64#20,<t52=int64#4
  1306. # asm 2: subq <t72_stack=152(%rsp),<t52=%rcx
  1307. subq 152(%rsp),%rcx
  1308. # qhasm: t53 -= t73_stack
  1309. # asm 1: subq <t73_stack=stack64#21,<t53=int64#5
  1310. # asm 2: subq <t73_stack=160(%rsp),<t53=%r8
  1311. subq 160(%rsp),%r8
  1312. # qhasm: t54 -= t74_stack
  1313. # asm 1: subq <t74_stack=stack64#22,<t54=int64#6
  1314. # asm 2: subq <t74_stack=168(%rsp),<t54=%r9
  1315. subq 168(%rsp),%r9
  1316. # qhasm: t50_stack = t50
  1317. # asm 1: movq <t50=int64#2,>t50_stack=stack64#28
  1318. # asm 2: movq <t50=%rsi,>t50_stack=216(%rsp)
  1319. movq %rsi,216(%rsp)
  1320. # qhasm: t51_stack = t51
  1321. # asm 1: movq <t51=int64#3,>t51_stack=stack64#29
  1322. # asm 2: movq <t51=%rdx,>t51_stack=224(%rsp)
  1323. movq %rdx,224(%rsp)
  1324. # qhasm: t52_stack = t52
  1325. # asm 1: movq <t52=int64#4,>t52_stack=stack64#30
  1326. # asm 2: movq <t52=%rcx,>t52_stack=232(%rsp)
  1327. movq %rcx,232(%rsp)
  1328. # qhasm: t53_stack = t53
  1329. # asm 1: movq <t53=int64#5,>t53_stack=stack64#31
  1330. # asm 2: movq <t53=%r8,>t53_stack=240(%rsp)
  1331. movq %r8,240(%rsp)
  1332. # qhasm: t54_stack = t54
  1333. # asm 1: movq <t54=int64#6,>t54_stack=stack64#32
  1334. # asm 2: movq <t54=%r9,>t54_stack=248(%rsp)
  1335. movq %r9,248(%rsp)
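# t50..t54 = t60..t64 + 2*p - t70..t74, computed limbwise. The constants
# crypto_scalarmult_curve25519_amd64_51_2P0 and _2P1234 appear to be the
# radix-2^51 limbs of 2*(2^255 - 19), i.e. 2^52 - 38 for limb 0 and 2^52 - 2
# for limbs 1..4; adding them first keeps every limb nonnegative across the
# subq of t70_stack..t74_stack, assuming both inputs are already reduced.
# The (not yet carried) result is spilled to 216(%rsp)..248(%rsp) as
# t50_stack..t54_stack.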
  1336. # qhasm: t30 = *(uint64 *)(workp + 120)
  1337. # asm 1: movq 120(<workp=int64#1),>t30=int64#2
  1338. # asm 2: movq 120(<workp=%rdi),>t30=%rsi
  1339. movq 120(%rdi),%rsi
  1340. # qhasm: t31 = *(uint64 *)(workp + 128)
  1341. # asm 1: movq 128(<workp=int64#1),>t31=int64#3
  1342. # asm 2: movq 128(<workp=%rdi),>t31=%rdx
  1343. movq 128(%rdi),%rdx
  1344. # qhasm: t32 = *(uint64 *)(workp + 136)
  1345. # asm 1: movq 136(<workp=int64#1),>t32=int64#4
  1346. # asm 2: movq 136(<workp=%rdi),>t32=%rcx
  1347. movq 136(%rdi),%rcx
  1348. # qhasm: t33 = *(uint64 *)(workp + 144)
  1349. # asm 1: movq 144(<workp=int64#1),>t33=int64#5
  1350. # asm 2: movq 144(<workp=%rdi),>t33=%r8
  1351. movq 144(%rdi),%r8
  1352. # qhasm: t34 = *(uint64 *)(workp + 152)
  1353. # asm 1: movq 152(<workp=int64#1),>t34=int64#6
  1354. # asm 2: movq 152(<workp=%rdi),>t34=%r9
  1355. movq 152(%rdi),%r9
  1356. # qhasm: t40 = t30
  1357. # asm 1: mov <t30=int64#2,>t40=int64#7
  1358. # asm 2: mov <t30=%rsi,>t40=%rax
  1359. mov %rsi,%rax
  1360. # qhasm: t41 = t31
  1361. # asm 1: mov <t31=int64#3,>t41=int64#8
  1362. # asm 2: mov <t31=%rdx,>t41=%r10
  1363. mov %rdx,%r10
  1364. # qhasm: t42 = t32
  1365. # asm 1: mov <t32=int64#4,>t42=int64#9
  1366. # asm 2: mov <t32=%rcx,>t42=%r11
  1367. mov %rcx,%r11
  1368. # qhasm: t43 = t33
  1369. # asm 1: mov <t33=int64#5,>t43=int64#10
  1370. # asm 2: mov <t33=%r8,>t43=%r12
  1371. mov %r8,%r12
  1372. # qhasm: t44 = t34
  1373. # asm 1: mov <t34=int64#6,>t44=int64#11
  1374. # asm 2: mov <t34=%r9,>t44=%r13
  1375. mov %r9,%r13
  1376. # qhasm: t40 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P0
  1377. # asm 1: add crypto_scalarmult_curve25519_amd64_51_2P0,<t40=int64#7
  1378. # asm 2: add crypto_scalarmult_curve25519_amd64_51_2P0,<t40=%rax
  1379. add crypto_scalarmult_curve25519_amd64_51_2P0,%rax
  1380. # qhasm: t41 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P1234
  1381. # asm 1: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t41=int64#8
  1382. # asm 2: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t41=%r10
  1383. add crypto_scalarmult_curve25519_amd64_51_2P1234,%r10
  1384. # qhasm: t42 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P1234
  1385. # asm 1: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t42=int64#9
  1386. # asm 2: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t42=%r11
  1387. add crypto_scalarmult_curve25519_amd64_51_2P1234,%r11
  1388. # qhasm: t43 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P1234
  1389. # asm 1: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t43=int64#10
  1390. # asm 2: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t43=%r12
  1391. add crypto_scalarmult_curve25519_amd64_51_2P1234,%r12
  1392. # qhasm: t44 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P1234
  1393. # asm 1: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t44=int64#11
  1394. # asm 2: add crypto_scalarmult_curve25519_amd64_51_2P1234,<t44=%r13
  1395. add crypto_scalarmult_curve25519_amd64_51_2P1234,%r13
  1396. # qhasm: t30 += *(uint64 *)(workp + 160)
  1397. # asm 1: addq 160(<workp=int64#1),<t30=int64#2
  1398. # asm 2: addq 160(<workp=%rdi),<t30=%rsi
  1399. addq 160(%rdi),%rsi
  1400. # qhasm: t31 += *(uint64 *)(workp + 168)
  1401. # asm 1: addq 168(<workp=int64#1),<t31=int64#3
  1402. # asm 2: addq 168(<workp=%rdi),<t31=%rdx
  1403. addq 168(%rdi),%rdx
  1404. # qhasm: t32 += *(uint64 *)(workp + 176)
  1405. # asm 1: addq 176(<workp=int64#1),<t32=int64#4
  1406. # asm 2: addq 176(<workp=%rdi),<t32=%rcx
  1407. addq 176(%rdi),%rcx
  1408. # qhasm: t33 += *(uint64 *)(workp + 184)
  1409. # asm 1: addq 184(<workp=int64#1),<t33=int64#5
  1410. # asm 2: addq 184(<workp=%rdi),<t33=%r8
  1411. addq 184(%rdi),%r8
  1412. # qhasm: t34 += *(uint64 *)(workp + 192)
  1413. # asm 1: addq 192(<workp=int64#1),<t34=int64#6
  1414. # asm 2: addq 192(<workp=%rdi),<t34=%r9
  1415. addq 192(%rdi),%r9
  1416. # qhasm: t40 -= *(uint64 *)(workp + 160)
  1417. # asm 1: subq 160(<workp=int64#1),<t40=int64#7
  1418. # asm 2: subq 160(<workp=%rdi),<t40=%rax
  1419. subq 160(%rdi),%rax
  1420. # qhasm: t41 -= *(uint64 *)(workp + 168)
  1421. # asm 1: subq 168(<workp=int64#1),<t41=int64#8
  1422. # asm 2: subq 168(<workp=%rdi),<t41=%r10
  1423. subq 168(%rdi),%r10
  1424. # qhasm: t42 -= *(uint64 *)(workp + 176)
  1425. # asm 1: subq 176(<workp=int64#1),<t42=int64#9
  1426. # asm 2: subq 176(<workp=%rdi),<t42=%r11
  1427. subq 176(%rdi),%r11
  1428. # qhasm: t43 -= *(uint64 *)(workp + 184)
  1429. # asm 1: subq 184(<workp=int64#1),<t43=int64#10
  1430. # asm 2: subq 184(<workp=%rdi),<t43=%r12
  1431. subq 184(%rdi),%r12
  1432. # qhasm: t44 -= *(uint64 *)(workp + 192)
  1433. # asm 1: subq 192(<workp=int64#1),<t44=int64#11
  1434. # asm 2: subq 192(<workp=%rdi),<t44=%r13
  1435. subq 192(%rdi),%r13
  1436. # qhasm: t30_stack = t30
  1437. # asm 1: movq <t30=int64#2,>t30_stack=stack64#33
  1438. # asm 2: movq <t30=%rsi,>t30_stack=256(%rsp)
  1439. movq %rsi,256(%rsp)
  1440. # qhasm: t31_stack = t31
  1441. # asm 1: movq <t31=int64#3,>t31_stack=stack64#34
  1442. # asm 2: movq <t31=%rdx,>t31_stack=264(%rsp)
  1443. movq %rdx,264(%rsp)
  1444. # qhasm: t32_stack = t32
  1445. # asm 1: movq <t32=int64#4,>t32_stack=stack64#35
  1446. # asm 2: movq <t32=%rcx,>t32_stack=272(%rsp)
  1447. movq %rcx,272(%rsp)
  1448. # qhasm: t33_stack = t33
  1449. # asm 1: movq <t33=int64#5,>t33_stack=stack64#36
  1450. # asm 2: movq <t33=%r8,>t33_stack=280(%rsp)
  1451. movq %r8,280(%rsp)
  1452. # qhasm: t34_stack = t34
  1453. # asm 1: movq <t34=int64#6,>t34_stack=stack64#37
  1454. # asm 2: movq <t34=%r9,>t34_stack=288(%rsp)
  1455. movq %r9,288(%rsp)
  1456. # qhasm: t40_stack = t40
  1457. # asm 1: movq <t40=int64#7,>t40_stack=stack64#38
  1458. # asm 2: movq <t40=%rax,>t40_stack=296(%rsp)
  1459. movq %rax,296(%rsp)
  1460. # qhasm: t41_stack = t41
  1461. # asm 1: movq <t41=int64#8,>t41_stack=stack64#39
  1462. # asm 2: movq <t41=%r10,>t41_stack=304(%rsp)
  1463. movq %r10,304(%rsp)
  1464. # qhasm: t42_stack = t42
  1465. # asm 1: movq <t42=int64#9,>t42_stack=stack64#40
  1466. # asm 2: movq <t42=%r11,>t42_stack=312(%rsp)
  1467. movq %r11,312(%rsp)
  1468. # qhasm: t43_stack = t43
  1469. # asm 1: movq <t43=int64#10,>t43_stack=stack64#41
  1470. # asm 2: movq <t43=%r12,>t43_stack=320(%rsp)
  1471. movq %r12,320(%rsp)
  1472. # qhasm: t44_stack = t44
  1473. # asm 1: movq <t44=int64#11,>t44_stack=stack64#42
  1474. # asm 2: movq <t44=%r13,>t44_stack=328(%rsp)
  1475. movq %r13,328(%rsp)
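# The block above loads the field element at 120..152(workp) and combines it
# limb by limb with the element at 160..192(workp): t30..t34 is their sum and
# t40..t44 their difference, again biased by the 2P constants so that no limb
# underflows. Both are spilled, t30_stack..t34_stack to 256(%rsp)..288(%rsp)
# and t40_stack..t44_stack to 296(%rsp)..328(%rsp). In a Montgomery ladder
# step these sum/difference pairs presumably play the role of X+Z and X-Z for
# one of the working points held in workp.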
  1476. # qhasm: mulrax = t33_stack
  1477. # asm 1: movq <t33_stack=stack64#36,>mulrax=int64#2
  1478. # asm 2: movq <t33_stack=280(%rsp),>mulrax=%rsi
  1479. movq 280(%rsp),%rsi
  1480. # qhasm: mulrax *= 19
  1481. # asm 1: imulq $19,<mulrax=int64#2,>mulrax=int64#7
  1482. # asm 2: imulq $19,<mulrax=%rsi,>mulrax=%rax
  1483. imulq $19,%rsi,%rax
  1484. # qhasm: mulx319_stack = mulrax
  1485. # asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#43
  1486. # asm 2: movq <mulrax=%rax,>mulx319_stack=336(%rsp)
  1487. movq %rax,336(%rsp)
  1488. # qhasm: (uint128) mulrdx mulrax = mulrax * t22_stack
  1489. # asm 1: mulq <t22_stack=stack64#15
  1490. # asm 2: mulq <t22_stack=112(%rsp)
  1491. mulq 112(%rsp)
  1492. # qhasm: t90 = mulrax
  1493. # asm 1: mov <mulrax=int64#7,>t90=int64#2
  1494. # asm 2: mov <mulrax=%rax,>t90=%rsi
  1495. mov %rax,%rsi
  1496. # qhasm: mulr01 = mulrdx
  1497. # asm 1: mov <mulrdx=int64#3,>mulr01=int64#4
  1498. # asm 2: mov <mulrdx=%rdx,>mulr01=%rcx
  1499. mov %rdx,%rcx
  1500. # qhasm: mulrax = t34_stack
  1501. # asm 1: movq <t34_stack=stack64#37,>mulrax=int64#3
  1502. # asm 2: movq <t34_stack=288(%rsp),>mulrax=%rdx
  1503. movq 288(%rsp),%rdx
  1504. # qhasm: mulrax *= 19
  1505. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1506. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1507. imulq $19,%rdx,%rax
  1508. # qhasm: mulx419_stack = mulrax
  1509. # asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#44
  1510. # asm 2: movq <mulrax=%rax,>mulx419_stack=344(%rsp)
  1511. movq %rax,344(%rsp)
  1512. # qhasm: (uint128) mulrdx mulrax = mulrax * t21_stack
  1513. # asm 1: mulq <t21_stack=stack64#14
  1514. # asm 2: mulq <t21_stack=104(%rsp)
  1515. mulq 104(%rsp)
  1516. # qhasm: carry? t90 += mulrax
  1517. # asm 1: add <mulrax=int64#7,<t90=int64#2
  1518. # asm 2: add <mulrax=%rax,<t90=%rsi
  1519. add %rax,%rsi
  1520. # qhasm: mulr01 += mulrdx + carry
  1521. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  1522. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  1523. adc %rdx,%rcx
  1524. # qhasm: mulrax = t30_stack
  1525. # asm 1: movq <t30_stack=stack64#33,>mulrax=int64#7
  1526. # asm 2: movq <t30_stack=256(%rsp),>mulrax=%rax
  1527. movq 256(%rsp),%rax
  1528. # qhasm: (uint128) mulrdx mulrax = mulrax * t20_stack
  1529. # asm 1: mulq <t20_stack=stack64#13
  1530. # asm 2: mulq <t20_stack=96(%rsp)
  1531. mulq 96(%rsp)
  1532. # qhasm: carry? t90 += mulrax
  1533. # asm 1: add <mulrax=int64#7,<t90=int64#2
  1534. # asm 2: add <mulrax=%rax,<t90=%rsi
  1535. add %rax,%rsi
  1536. # qhasm: mulr01 += mulrdx + carry
  1537. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  1538. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  1539. adc %rdx,%rcx
  1540. # qhasm: mulrax = t30_stack
  1541. # asm 1: movq <t30_stack=stack64#33,>mulrax=int64#7
  1542. # asm 2: movq <t30_stack=256(%rsp),>mulrax=%rax
  1543. movq 256(%rsp),%rax
  1544. # qhasm: (uint128) mulrdx mulrax = mulrax * t21_stack
  1545. # asm 1: mulq <t21_stack=stack64#14
  1546. # asm 2: mulq <t21_stack=104(%rsp)
  1547. mulq 104(%rsp)
  1548. # qhasm: t91 = mulrax
  1549. # asm 1: mov <mulrax=int64#7,>t91=int64#5
  1550. # asm 2: mov <mulrax=%rax,>t91=%r8
  1551. mov %rax,%r8
  1552. # qhasm: mulr11 = mulrdx
  1553. # asm 1: mov <mulrdx=int64#3,>mulr11=int64#6
  1554. # asm 2: mov <mulrdx=%rdx,>mulr11=%r9
  1555. mov %rdx,%r9
  1556. # qhasm: mulrax = t30_stack
  1557. # asm 1: movq <t30_stack=stack64#33,>mulrax=int64#7
  1558. # asm 2: movq <t30_stack=256(%rsp),>mulrax=%rax
  1559. movq 256(%rsp),%rax
  1560. # qhasm: (uint128) mulrdx mulrax = mulrax * t22_stack
  1561. # asm 1: mulq <t22_stack=stack64#15
  1562. # asm 2: mulq <t22_stack=112(%rsp)
  1563. mulq 112(%rsp)
  1564. # qhasm: t92 = mulrax
  1565. # asm 1: mov <mulrax=int64#7,>t92=int64#8
  1566. # asm 2: mov <mulrax=%rax,>t92=%r10
  1567. mov %rax,%r10
  1568. # qhasm: mulr21 = mulrdx
  1569. # asm 1: mov <mulrdx=int64#3,>mulr21=int64#9
  1570. # asm 2: mov <mulrdx=%rdx,>mulr21=%r11
  1571. mov %rdx,%r11
  1572. # qhasm: mulrax = t30_stack
  1573. # asm 1: movq <t30_stack=stack64#33,>mulrax=int64#7
  1574. # asm 2: movq <t30_stack=256(%rsp),>mulrax=%rax
  1575. movq 256(%rsp),%rax
  1576. # qhasm: (uint128) mulrdx mulrax = mulrax * t23_stack
  1577. # asm 1: mulq <t23_stack=stack64#16
  1578. # asm 2: mulq <t23_stack=120(%rsp)
  1579. mulq 120(%rsp)
  1580. # qhasm: t93 = mulrax
  1581. # asm 1: mov <mulrax=int64#7,>t93=int64#10
  1582. # asm 2: mov <mulrax=%rax,>t93=%r12
  1583. mov %rax,%r12
  1584. # qhasm: mulr31 = mulrdx
  1585. # asm 1: mov <mulrdx=int64#3,>mulr31=int64#11
  1586. # asm 2: mov <mulrdx=%rdx,>mulr31=%r13
  1587. mov %rdx,%r13
  1588. # qhasm: mulrax = t30_stack
  1589. # asm 1: movq <t30_stack=stack64#33,>mulrax=int64#7
  1590. # asm 2: movq <t30_stack=256(%rsp),>mulrax=%rax
  1591. movq 256(%rsp),%rax
  1592. # qhasm: (uint128) mulrdx mulrax = mulrax * t24_stack
  1593. # asm 1: mulq <t24_stack=stack64#17
  1594. # asm 2: mulq <t24_stack=128(%rsp)
  1595. mulq 128(%rsp)
  1596. # qhasm: t94 = mulrax
  1597. # asm 1: mov <mulrax=int64#7,>t94=int64#12
  1598. # asm 2: mov <mulrax=%rax,>t94=%r14
  1599. mov %rax,%r14
  1600. # qhasm: mulr41 = mulrdx
  1601. # asm 1: mov <mulrdx=int64#3,>mulr41=int64#13
  1602. # asm 2: mov <mulrdx=%rdx,>mulr41=%r15
  1603. mov %rdx,%r15
  1604. # qhasm: mulrax = t31_stack
  1605. # asm 1: movq <t31_stack=stack64#34,>mulrax=int64#7
  1606. # asm 2: movq <t31_stack=264(%rsp),>mulrax=%rax
  1607. movq 264(%rsp),%rax
  1608. # qhasm: (uint128) mulrdx mulrax = mulrax * t20_stack
  1609. # asm 1: mulq <t20_stack=stack64#13
  1610. # asm 2: mulq <t20_stack=96(%rsp)
  1611. mulq 96(%rsp)
  1612. # qhasm: carry? t91 += mulrax
  1613. # asm 1: add <mulrax=int64#7,<t91=int64#5
  1614. # asm 2: add <mulrax=%rax,<t91=%r8
  1615. add %rax,%r8
  1616. # qhasm: mulr11 += mulrdx + carry
  1617. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  1618. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  1619. adc %rdx,%r9
  1620. # qhasm: mulrax = t31_stack
  1621. # asm 1: movq <t31_stack=stack64#34,>mulrax=int64#7
  1622. # asm 2: movq <t31_stack=264(%rsp),>mulrax=%rax
  1623. movq 264(%rsp),%rax
  1624. # qhasm: (uint128) mulrdx mulrax = mulrax * t21_stack
  1625. # asm 1: mulq <t21_stack=stack64#14
  1626. # asm 2: mulq <t21_stack=104(%rsp)
  1627. mulq 104(%rsp)
  1628. # qhasm: carry? t92 += mulrax
  1629. # asm 1: add <mulrax=int64#7,<t92=int64#8
  1630. # asm 2: add <mulrax=%rax,<t92=%r10
  1631. add %rax,%r10
  1632. # qhasm: mulr21 += mulrdx + carry
  1633. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  1634. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  1635. adc %rdx,%r11
  1636. # qhasm: mulrax = t31_stack
  1637. # asm 1: movq <t31_stack=stack64#34,>mulrax=int64#7
  1638. # asm 2: movq <t31_stack=264(%rsp),>mulrax=%rax
  1639. movq 264(%rsp),%rax
  1640. # qhasm: (uint128) mulrdx mulrax = mulrax * t22_stack
  1641. # asm 1: mulq <t22_stack=stack64#15
  1642. # asm 2: mulq <t22_stack=112(%rsp)
  1643. mulq 112(%rsp)
  1644. # qhasm: carry? t93 += mulrax
  1645. # asm 1: add <mulrax=int64#7,<t93=int64#10
  1646. # asm 2: add <mulrax=%rax,<t93=%r12
  1647. add %rax,%r12
  1648. # qhasm: mulr31 += mulrdx + carry
  1649. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  1650. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  1651. adc %rdx,%r13
  1652. # qhasm: mulrax = t31_stack
  1653. # asm 1: movq <t31_stack=stack64#34,>mulrax=int64#7
  1654. # asm 2: movq <t31_stack=264(%rsp),>mulrax=%rax
  1655. movq 264(%rsp),%rax
  1656. # qhasm: (uint128) mulrdx mulrax = mulrax * t23_stack
  1657. # asm 1: mulq <t23_stack=stack64#16
  1658. # asm 2: mulq <t23_stack=120(%rsp)
  1659. mulq 120(%rsp)
  1660. # qhasm: carry? t94 += mulrax
  1661. # asm 1: add <mulrax=int64#7,<t94=int64#12
  1662. # asm 2: add <mulrax=%rax,<t94=%r14
  1663. add %rax,%r14
  1664. # qhasm: mulr41 += mulrdx + carry
  1665. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  1666. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  1667. adc %rdx,%r15
  1668. # qhasm: mulrax = t31_stack
  1669. # asm 1: movq <t31_stack=stack64#34,>mulrax=int64#3
  1670. # asm 2: movq <t31_stack=264(%rsp),>mulrax=%rdx
  1671. movq 264(%rsp),%rdx
  1672. # qhasm: mulrax *= 19
  1673. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1674. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1675. imulq $19,%rdx,%rax
  1676. # qhasm: (uint128) mulrdx mulrax = mulrax * t24_stack
  1677. # asm 1: mulq <t24_stack=stack64#17
  1678. # asm 2: mulq <t24_stack=128(%rsp)
  1679. mulq 128(%rsp)
  1680. # qhasm: carry? t90 += mulrax
  1681. # asm 1: add <mulrax=int64#7,<t90=int64#2
  1682. # asm 2: add <mulrax=%rax,<t90=%rsi
  1683. add %rax,%rsi
  1684. # qhasm: mulr01 += mulrdx + carry
  1685. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  1686. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  1687. adc %rdx,%rcx
  1688. # qhasm: mulrax = t32_stack
  1689. # asm 1: movq <t32_stack=stack64#35,>mulrax=int64#7
  1690. # asm 2: movq <t32_stack=272(%rsp),>mulrax=%rax
  1691. movq 272(%rsp),%rax
  1692. # qhasm: (uint128) mulrdx mulrax = mulrax * t20_stack
  1693. # asm 1: mulq <t20_stack=stack64#13
  1694. # asm 2: mulq <t20_stack=96(%rsp)
  1695. mulq 96(%rsp)
  1696. # qhasm: carry? t92 += mulrax
  1697. # asm 1: add <mulrax=int64#7,<t92=int64#8
  1698. # asm 2: add <mulrax=%rax,<t92=%r10
  1699. add %rax,%r10
  1700. # qhasm: mulr21 += mulrdx + carry
  1701. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  1702. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  1703. adc %rdx,%r11
  1704. # qhasm: mulrax = t32_stack
  1705. # asm 1: movq <t32_stack=stack64#35,>mulrax=int64#7
  1706. # asm 2: movq <t32_stack=272(%rsp),>mulrax=%rax
  1707. movq 272(%rsp),%rax
  1708. # qhasm: (uint128) mulrdx mulrax = mulrax * t21_stack
  1709. # asm 1: mulq <t21_stack=stack64#14
  1710. # asm 2: mulq <t21_stack=104(%rsp)
  1711. mulq 104(%rsp)
  1712. # qhasm: carry? t93 += mulrax
  1713. # asm 1: add <mulrax=int64#7,<t93=int64#10
  1714. # asm 2: add <mulrax=%rax,<t93=%r12
  1715. add %rax,%r12
  1716. # qhasm: mulr31 += mulrdx + carry
  1717. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  1718. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  1719. adc %rdx,%r13
  1720. # qhasm: mulrax = t32_stack
  1721. # asm 1: movq <t32_stack=stack64#35,>mulrax=int64#7
  1722. # asm 2: movq <t32_stack=272(%rsp),>mulrax=%rax
  1723. movq 272(%rsp),%rax
  1724. # qhasm: (uint128) mulrdx mulrax = mulrax * t22_stack
  1725. # asm 1: mulq <t22_stack=stack64#15
  1726. # asm 2: mulq <t22_stack=112(%rsp)
  1727. mulq 112(%rsp)
  1728. # qhasm: carry? t94 += mulrax
  1729. # asm 1: add <mulrax=int64#7,<t94=int64#12
  1730. # asm 2: add <mulrax=%rax,<t94=%r14
  1731. add %rax,%r14
  1732. # qhasm: mulr41 += mulrdx + carry
  1733. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  1734. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  1735. adc %rdx,%r15
  1736. # qhasm: mulrax = t32_stack
  1737. # asm 1: movq <t32_stack=stack64#35,>mulrax=int64#3
  1738. # asm 2: movq <t32_stack=272(%rsp),>mulrax=%rdx
  1739. movq 272(%rsp),%rdx
  1740. # qhasm: mulrax *= 19
  1741. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1742. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1743. imulq $19,%rdx,%rax
  1744. # qhasm: (uint128) mulrdx mulrax = mulrax * t23_stack
  1745. # asm 1: mulq <t23_stack=stack64#16
  1746. # asm 2: mulq <t23_stack=120(%rsp)
  1747. mulq 120(%rsp)
  1748. # qhasm: carry? t90 += mulrax
  1749. # asm 1: add <mulrax=int64#7,<t90=int64#2
  1750. # asm 2: add <mulrax=%rax,<t90=%rsi
  1751. add %rax,%rsi
  1752. # qhasm: mulr01 += mulrdx + carry
  1753. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  1754. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  1755. adc %rdx,%rcx
  1756. # qhasm: mulrax = t32_stack
  1757. # asm 1: movq <t32_stack=stack64#35,>mulrax=int64#3
  1758. # asm 2: movq <t32_stack=272(%rsp),>mulrax=%rdx
  1759. movq 272(%rsp),%rdx
  1760. # qhasm: mulrax *= 19
  1761. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  1762. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  1763. imulq $19,%rdx,%rax
  1764. # qhasm: (uint128) mulrdx mulrax = mulrax * t24_stack
  1765. # asm 1: mulq <t24_stack=stack64#17
  1766. # asm 2: mulq <t24_stack=128(%rsp)
  1767. mulq 128(%rsp)
  1768. # qhasm: carry? t91 += mulrax
  1769. # asm 1: add <mulrax=int64#7,<t91=int64#5
  1770. # asm 2: add <mulrax=%rax,<t91=%r8
  1771. add %rax,%r8
  1772. # qhasm: mulr11 += mulrdx + carry
  1773. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  1774. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  1775. adc %rdx,%r9
  1776. # qhasm: mulrax = t33_stack
  1777. # asm 1: movq <t33_stack=stack64#36,>mulrax=int64#7
  1778. # asm 2: movq <t33_stack=280(%rsp),>mulrax=%rax
  1779. movq 280(%rsp),%rax
  1780. # qhasm: (uint128) mulrdx mulrax = mulrax * t20_stack
  1781. # asm 1: mulq <t20_stack=stack64#13
  1782. # asm 2: mulq <t20_stack=96(%rsp)
  1783. mulq 96(%rsp)
  1784. # qhasm: carry? t93 += mulrax
  1785. # asm 1: add <mulrax=int64#7,<t93=int64#10
  1786. # asm 2: add <mulrax=%rax,<t93=%r12
  1787. add %rax,%r12
  1788. # qhasm: mulr31 += mulrdx + carry
  1789. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  1790. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  1791. adc %rdx,%r13
  1792. # qhasm: mulrax = t33_stack
  1793. # asm 1: movq <t33_stack=stack64#36,>mulrax=int64#7
  1794. # asm 2: movq <t33_stack=280(%rsp),>mulrax=%rax
  1795. movq 280(%rsp),%rax
  1796. # qhasm: (uint128) mulrdx mulrax = mulrax * t21_stack
  1797. # asm 1: mulq <t21_stack=stack64#14
  1798. # asm 2: mulq <t21_stack=104(%rsp)
  1799. mulq 104(%rsp)
  1800. # qhasm: carry? t94 += mulrax
  1801. # asm 1: add <mulrax=int64#7,<t94=int64#12
  1802. # asm 2: add <mulrax=%rax,<t94=%r14
  1803. add %rax,%r14
  1804. # qhasm: mulr41 += mulrdx + carry
  1805. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  1806. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  1807. adc %rdx,%r15
  1808. # qhasm: mulrax = mulx319_stack
  1809. # asm 1: movq <mulx319_stack=stack64#43,>mulrax=int64#7
  1810. # asm 2: movq <mulx319_stack=336(%rsp),>mulrax=%rax
  1811. movq 336(%rsp),%rax
  1812. # qhasm: (uint128) mulrdx mulrax = mulrax * t23_stack
  1813. # asm 1: mulq <t23_stack=stack64#16
  1814. # asm 2: mulq <t23_stack=120(%rsp)
  1815. mulq 120(%rsp)
  1816. # qhasm: carry? t91 += mulrax
  1817. # asm 1: add <mulrax=int64#7,<t91=int64#5
  1818. # asm 2: add <mulrax=%rax,<t91=%r8
  1819. add %rax,%r8
  1820. # qhasm: mulr11 += mulrdx + carry
  1821. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  1822. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  1823. adc %rdx,%r9
  1824. # qhasm: mulrax = mulx319_stack
  1825. # asm 1: movq <mulx319_stack=stack64#43,>mulrax=int64#7
  1826. # asm 2: movq <mulx319_stack=336(%rsp),>mulrax=%rax
  1827. movq 336(%rsp),%rax
  1828. # qhasm: (uint128) mulrdx mulrax = mulrax * t24_stack
  1829. # asm 1: mulq <t24_stack=stack64#17
  1830. # asm 2: mulq <t24_stack=128(%rsp)
  1831. mulq 128(%rsp)
  1832. # qhasm: carry? t92 += mulrax
  1833. # asm 1: add <mulrax=int64#7,<t92=int64#8
  1834. # asm 2: add <mulrax=%rax,<t92=%r10
  1835. add %rax,%r10
  1836. # qhasm: mulr21 += mulrdx + carry
  1837. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  1838. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  1839. adc %rdx,%r11
  1840. # qhasm: mulrax = t34_stack
  1841. # asm 1: movq <t34_stack=stack64#37,>mulrax=int64#7
  1842. # asm 2: movq <t34_stack=288(%rsp),>mulrax=%rax
  1843. movq 288(%rsp),%rax
  1844. # qhasm: (uint128) mulrdx mulrax = mulrax * t20_stack
  1845. # asm 1: mulq <t20_stack=stack64#13
  1846. # asm 2: mulq <t20_stack=96(%rsp)
  1847. mulq 96(%rsp)
  1848. # qhasm: carry? t94 += mulrax
  1849. # asm 1: add <mulrax=int64#7,<t94=int64#12
  1850. # asm 2: add <mulrax=%rax,<t94=%r14
  1851. add %rax,%r14
  1852. # qhasm: mulr41 += mulrdx + carry
  1853. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  1854. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  1855. adc %rdx,%r15
  1856. # qhasm: mulrax = mulx419_stack
  1857. # asm 1: movq <mulx419_stack=stack64#44,>mulrax=int64#7
  1858. # asm 2: movq <mulx419_stack=344(%rsp),>mulrax=%rax
  1859. movq 344(%rsp),%rax
  1860. # qhasm: (uint128) mulrdx mulrax = mulrax * t22_stack
  1861. # asm 1: mulq <t22_stack=stack64#15
  1862. # asm 2: mulq <t22_stack=112(%rsp)
  1863. mulq 112(%rsp)
  1864. # qhasm: carry? t91 += mulrax
  1865. # asm 1: add <mulrax=int64#7,<t91=int64#5
  1866. # asm 2: add <mulrax=%rax,<t91=%r8
  1867. add %rax,%r8
  1868. # qhasm: mulr11 += mulrdx + carry
  1869. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  1870. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  1871. adc %rdx,%r9
  1872. # qhasm: mulrax = mulx419_stack
  1873. # asm 1: movq <mulx419_stack=stack64#44,>mulrax=int64#7
  1874. # asm 2: movq <mulx419_stack=344(%rsp),>mulrax=%rax
  1875. movq 344(%rsp),%rax
  1876. # qhasm: (uint128) mulrdx mulrax = mulrax * t23_stack
  1877. # asm 1: mulq <t23_stack=stack64#16
  1878. # asm 2: mulq <t23_stack=120(%rsp)
  1879. mulq 120(%rsp)
  1880. # qhasm: carry? t92 += mulrax
  1881. # asm 1: add <mulrax=int64#7,<t92=int64#8
  1882. # asm 2: add <mulrax=%rax,<t92=%r10
  1883. add %rax,%r10
  1884. # qhasm: mulr21 += mulrdx + carry
  1885. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  1886. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  1887. adc %rdx,%r11
  1888. # qhasm: mulrax = mulx419_stack
  1889. # asm 1: movq <mulx419_stack=stack64#44,>mulrax=int64#7
  1890. # asm 2: movq <mulx419_stack=344(%rsp),>mulrax=%rax
  1891. movq 344(%rsp),%rax
  1892. # qhasm: (uint128) mulrdx mulrax = mulrax * t24_stack
  1893. # asm 1: mulq <t24_stack=stack64#17
  1894. # asm 2: mulq <t24_stack=128(%rsp)
  1895. mulq 128(%rsp)
  1896. # qhasm: carry? t93 += mulrax
  1897. # asm 1: add <mulrax=int64#7,<t93=int64#10
  1898. # asm 2: add <mulrax=%rax,<t93=%r12
  1899. add %rax,%r12
  1900. # qhasm: mulr31 += mulrdx + carry
  1901. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  1902. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  1903. adc %rdx,%r13
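# Column accumulation for the product of t30..t34 and t20..t24 is complete:
# the pairs (t90,mulr01) .. (t94,mulr41) hold the five 128-bit columns, with
# mulx319_stack = 19*t33 and mulx419_stack = 19*t34 precomputed so that
# partial products landing at limb index 5 or higher wrap around with the
# factor 19. The lines below reduce these columns to 51-bit limbs exactly as
# in the squarings above.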
  1904. # qhasm: mulredmask = *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_REDMASK51
  1905. # asm 1: movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,>mulredmask=int64#3
  1906. # asm 2: movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,>mulredmask=%rdx
  1907. movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,%rdx
  1908. # qhasm: mulr01 = (mulr01.t90) << 13
  1909. # asm 1: shld $13,<t90=int64#2,<mulr01=int64#4
  1910. # asm 2: shld $13,<t90=%rsi,<mulr01=%rcx
  1911. shld $13,%rsi,%rcx
  1912. # qhasm: t90 &= mulredmask
  1913. # asm 1: and <mulredmask=int64#3,<t90=int64#2
  1914. # asm 2: and <mulredmask=%rdx,<t90=%rsi
  1915. and %rdx,%rsi
  1916. # qhasm: mulr11 = (mulr11.t91) << 13
  1917. # asm 1: shld $13,<t91=int64#5,<mulr11=int64#6
  1918. # asm 2: shld $13,<t91=%r8,<mulr11=%r9
  1919. shld $13,%r8,%r9
  1920. # qhasm: t91 &= mulredmask
  1921. # asm 1: and <mulredmask=int64#3,<t91=int64#5
  1922. # asm 2: and <mulredmask=%rdx,<t91=%r8
  1923. and %rdx,%r8
  1924. # qhasm: t91 += mulr01
  1925. # asm 1: add <mulr01=int64#4,<t91=int64#5
  1926. # asm 2: add <mulr01=%rcx,<t91=%r8
  1927. add %rcx,%r8
  1928. # qhasm: mulr21 = (mulr21.t92) << 13
  1929. # asm 1: shld $13,<t92=int64#8,<mulr21=int64#9
  1930. # asm 2: shld $13,<t92=%r10,<mulr21=%r11
  1931. shld $13,%r10,%r11
  1932. # qhasm: t92 &= mulredmask
  1933. # asm 1: and <mulredmask=int64#3,<t92=int64#8
  1934. # asm 2: and <mulredmask=%rdx,<t92=%r10
  1935. and %rdx,%r10
  1936. # qhasm: t92 += mulr11
  1937. # asm 1: add <mulr11=int64#6,<t92=int64#8
  1938. # asm 2: add <mulr11=%r9,<t92=%r10
  1939. add %r9,%r10
  1940. # qhasm: mulr31 = (mulr31.t93) << 13
  1941. # asm 1: shld $13,<t93=int64#10,<mulr31=int64#11
  1942. # asm 2: shld $13,<t93=%r12,<mulr31=%r13
  1943. shld $13,%r12,%r13
  1944. # qhasm: t93 &= mulredmask
  1945. # asm 1: and <mulredmask=int64#3,<t93=int64#10
  1946. # asm 2: and <mulredmask=%rdx,<t93=%r12
  1947. and %rdx,%r12
  1948. # qhasm: t93 += mulr21
  1949. # asm 1: add <mulr21=int64#9,<t93=int64#10
  1950. # asm 2: add <mulr21=%r11,<t93=%r12
  1951. add %r11,%r12
  1952. # qhasm: mulr41 = (mulr41.t94) << 13
  1953. # asm 1: shld $13,<t94=int64#12,<mulr41=int64#13
  1954. # asm 2: shld $13,<t94=%r14,<mulr41=%r15
  1955. shld $13,%r14,%r15
  1956. # qhasm: t94 &= mulredmask
  1957. # asm 1: and <mulredmask=int64#3,<t94=int64#12
  1958. # asm 2: and <mulredmask=%rdx,<t94=%r14
  1959. and %rdx,%r14
  1960. # qhasm: t94 += mulr31
  1961. # asm 1: add <mulr31=int64#11,<t94=int64#12
  1962. # asm 2: add <mulr31=%r13,<t94=%r14
  1963. add %r13,%r14
  1964. # qhasm: mulr41 = mulr41 * 19
  1965. # asm 1: imulq $19,<mulr41=int64#13,>mulr41=int64#4
  1966. # asm 2: imulq $19,<mulr41=%r15,>mulr41=%rcx
  1967. imulq $19,%r15,%rcx
  1968. # qhasm: t90 += mulr41
  1969. # asm 1: add <mulr41=int64#4,<t90=int64#2
  1970. # asm 2: add <mulr41=%rcx,<t90=%rsi
  1971. add %rcx,%rsi
  1972. # qhasm: mult = t90
  1973. # asm 1: mov <t90=int64#2,>mult=int64#4
  1974. # asm 2: mov <t90=%rsi,>mult=%rcx
  1975. mov %rsi,%rcx
  1976. # qhasm: (uint64) mult >>= 51
  1977. # asm 1: shr $51,<mult=int64#4
  1978. # asm 2: shr $51,<mult=%rcx
  1979. shr $51,%rcx
  1980. # qhasm: mult += t91
  1981. # asm 1: add <t91=int64#5,<mult=int64#4
  1982. # asm 2: add <t91=%r8,<mult=%rcx
  1983. add %r8,%rcx
  1984. # qhasm: t91 = mult
  1985. # asm 1: mov <mult=int64#4,>t91=int64#5
  1986. # asm 2: mov <mult=%rcx,>t91=%r8
  1987. mov %rcx,%r8
  1988. # qhasm: (uint64) mult >>= 51
  1989. # asm 1: shr $51,<mult=int64#4
  1990. # asm 2: shr $51,<mult=%rcx
  1991. shr $51,%rcx
  1992. # qhasm: t90 &= mulredmask
  1993. # asm 1: and <mulredmask=int64#3,<t90=int64#2
  1994. # asm 2: and <mulredmask=%rdx,<t90=%rsi
  1995. and %rdx,%rsi
  1996. # qhasm: mult += t92
  1997. # asm 1: add <t92=int64#8,<mult=int64#4
  1998. # asm 2: add <t92=%r10,<mult=%rcx
  1999. add %r10,%rcx
  2000. # qhasm: t92 = mult
  2001. # asm 1: mov <mult=int64#4,>t92=int64#6
  2002. # asm 2: mov <mult=%rcx,>t92=%r9
  2003. mov %rcx,%r9
  2004. # qhasm: (uint64) mult >>= 51
  2005. # asm 1: shr $51,<mult=int64#4
  2006. # asm 2: shr $51,<mult=%rcx
  2007. shr $51,%rcx
  2008. # qhasm: t91 &= mulredmask
  2009. # asm 1: and <mulredmask=int64#3,<t91=int64#5
  2010. # asm 2: and <mulredmask=%rdx,<t91=%r8
  2011. and %rdx,%r8
  2012. # qhasm: mult += t93
  2013. # asm 1: add <t93=int64#10,<mult=int64#4
  2014. # asm 2: add <t93=%r12,<mult=%rcx
  2015. add %r12,%rcx
  2016. # qhasm: t93 = mult
  2017. # asm 1: mov <mult=int64#4,>t93=int64#7
  2018. # asm 2: mov <mult=%rcx,>t93=%rax
  2019. mov %rcx,%rax
  2020. # qhasm: (uint64) mult >>= 51
  2021. # asm 1: shr $51,<mult=int64#4
  2022. # asm 2: shr $51,<mult=%rcx
  2023. shr $51,%rcx
  2024. # qhasm: t92 &= mulredmask
  2025. # asm 1: and <mulredmask=int64#3,<t92=int64#6
  2026. # asm 2: and <mulredmask=%rdx,<t92=%r9
  2027. and %rdx,%r9
  2028. # qhasm: mult += t94
  2029. # asm 1: add <t94=int64#12,<mult=int64#4
  2030. # asm 2: add <t94=%r14,<mult=%rcx
  2031. add %r14,%rcx
  2032. # qhasm: t94 = mult
  2033. # asm 1: mov <mult=int64#4,>t94=int64#8
  2034. # asm 2: mov <mult=%rcx,>t94=%r10
  2035. mov %rcx,%r10
  2036. # qhasm: (uint64) mult >>= 51
  2037. # asm 1: shr $51,<mult=int64#4
  2038. # asm 2: shr $51,<mult=%rcx
  2039. shr $51,%rcx
  2040. # qhasm: t93 &= mulredmask
  2041. # asm 1: and <mulredmask=int64#3,<t93=int64#7
  2042. # asm 2: and <mulredmask=%rdx,<t93=%rax
  2043. and %rdx,%rax
  2044. # qhasm: mult *= 19
  2045. # asm 1: imulq $19,<mult=int64#4,>mult=int64#4
  2046. # asm 2: imulq $19,<mult=%rcx,>mult=%rcx
  2047. imulq $19,%rcx,%rcx
  2048. # qhasm: t90 += mult
  2049. # asm 1: add <mult=int64#4,<t90=int64#2
  2050. # asm 2: add <mult=%rcx,<t90=%rsi
  2051. add %rcx,%rsi
  2052. # qhasm: t94 &= mulredmask
  2053. # asm 1: and <mulredmask=int64#3,<t94=int64#8
  2054. # asm 2: and <mulredmask=%rdx,<t94=%r10
  2055. and %rdx,%r10
  2056. # qhasm: t90_stack = t90
  2057. # asm 1: movq <t90=int64#2,>t90_stack=stack64#13
  2058. # asm 2: movq <t90=%rsi,>t90_stack=96(%rsp)
  2059. movq %rsi,96(%rsp)
  2060. # qhasm: t91_stack = t91
  2061. # asm 1: movq <t91=int64#5,>t91_stack=stack64#14
  2062. # asm 2: movq <t91=%r8,>t91_stack=104(%rsp)
  2063. movq %r8,104(%rsp)
  2064. # qhasm: t92_stack = t92
  2065. # asm 1: movq <t92=int64#6,>t92_stack=stack64#15
  2066. # asm 2: movq <t92=%r9,>t92_stack=112(%rsp)
  2067. movq %r9,112(%rsp)
  2068. # qhasm: t93_stack = t93
  2069. # asm 1: movq <t93=int64#7,>t93_stack=stack64#16
  2070. # asm 2: movq <t93=%rax,>t93_stack=120(%rsp)
  2071. movq %rax,120(%rsp)
  2072. # qhasm: t94_stack = t94
  2073. # asm 1: movq <t94=int64#8,>t94_stack=stack64#17
  2074. # asm 2: movq <t94=%r10,>t94_stack=128(%rsp)
  2075. movq %r10,128(%rsp)
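# Note (added comment): t90..t94 now hold 51-bit limbs of the value just
# reduced and are spilled to 96(%rsp)..128(%rsp). The long block below
# computes a field product the same way: a radix-2^51 schoolbook
# multiplication of the limbs in t40..t44_stack by those in t10..t14_stack,
# accumulating each 128-bit column in a t8N / mulrN1 register pair.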
  2076. # qhasm: mulrax = t43_stack
  2077. # asm 1: movq <t43_stack=stack64#41,>mulrax=int64#2
  2078. # asm 2: movq <t43_stack=320(%rsp),>mulrax=%rsi
  2079. movq 320(%rsp),%rsi
  2080. # qhasm: mulrax *= 19
  2081. # asm 1: imulq $19,<mulrax=int64#2,>mulrax=int64#7
  2082. # asm 2: imulq $19,<mulrax=%rsi,>mulrax=%rax
  2083. imulq $19,%rsi,%rax
  2084. # qhasm: mulx319_stack = mulrax
  2085. # asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#33
  2086. # asm 2: movq <mulrax=%rax,>mulx319_stack=256(%rsp)
  2087. movq %rax,256(%rsp)
  2088. # qhasm: (uint128) mulrdx mulrax = mulrax * t12_stack
  2089. # asm 1: mulq <t12_stack=stack64#10
  2090. # asm 2: mulq <t12_stack=72(%rsp)
  2091. mulq 72(%rsp)
  2092. # qhasm: t80 = mulrax
  2093. # asm 1: mov <mulrax=int64#7,>t80=int64#2
  2094. # asm 2: mov <mulrax=%rax,>t80=%rsi
  2095. mov %rax,%rsi
  2096. # qhasm: mulr01 = mulrdx
  2097. # asm 1: mov <mulrdx=int64#3,>mulr01=int64#4
  2098. # asm 2: mov <mulrdx=%rdx,>mulr01=%rcx
  2099. mov %rdx,%rcx
  2100. # qhasm: mulrax = t44_stack
  2101. # asm 1: movq <t44_stack=stack64#42,>mulrax=int64#3
  2102. # asm 2: movq <t44_stack=328(%rsp),>mulrax=%rdx
  2103. movq 328(%rsp),%rdx
  2104. # qhasm: mulrax *= 19
  2105. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  2106. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  2107. imulq $19,%rdx,%rax
  2108. # qhasm: mulx419_stack = mulrax
  2109. # asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#34
  2110. # asm 2: movq <mulrax=%rax,>mulx419_stack=264(%rsp)
  2111. movq %rax,264(%rsp)
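# Note (added comment): 19*t43 and 19*t44 are cached at 256(%rsp)/264(%rsp)
# (mulx319_stack/mulx419_stack) so the wrap-around partial products below
# can reuse them without recomputing the scaling.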
  2112. # qhasm: (uint128) mulrdx mulrax = mulrax * t11_stack
  2113. # asm 1: mulq <t11_stack=stack64#9
  2114. # asm 2: mulq <t11_stack=64(%rsp)
  2115. mulq 64(%rsp)
  2116. # qhasm: carry? t80 += mulrax
  2117. # asm 1: add <mulrax=int64#7,<t80=int64#2
  2118. # asm 2: add <mulrax=%rax,<t80=%rsi
  2119. add %rax,%rsi
  2120. # qhasm: mulr01 += mulrdx + carry
  2121. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  2122. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  2123. adc %rdx,%rcx
  2124. # qhasm: mulrax = t40_stack
  2125. # asm 1: movq <t40_stack=stack64#38,>mulrax=int64#7
  2126. # asm 2: movq <t40_stack=296(%rsp),>mulrax=%rax
  2127. movq 296(%rsp),%rax
  2128. # qhasm: (uint128) mulrdx mulrax = mulrax * t10_stack
  2129. # asm 1: mulq <t10_stack=stack64#8
  2130. # asm 2: mulq <t10_stack=56(%rsp)
  2131. mulq 56(%rsp)
  2132. # qhasm: carry? t80 += mulrax
  2133. # asm 1: add <mulrax=int64#7,<t80=int64#2
  2134. # asm 2: add <mulrax=%rax,<t80=%rsi
  2135. add %rax,%rsi
  2136. # qhasm: mulr01 += mulrdx + carry
  2137. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  2138. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  2139. adc %rdx,%rcx
  2140. # qhasm: mulrax = t40_stack
  2141. # asm 1: movq <t40_stack=stack64#38,>mulrax=int64#7
  2142. # asm 2: movq <t40_stack=296(%rsp),>mulrax=%rax
  2143. movq 296(%rsp),%rax
  2144. # qhasm: (uint128) mulrdx mulrax = mulrax * t11_stack
  2145. # asm 1: mulq <t11_stack=stack64#9
  2146. # asm 2: mulq <t11_stack=64(%rsp)
  2147. mulq 64(%rsp)
  2148. # qhasm: t81 = mulrax
  2149. # asm 1: mov <mulrax=int64#7,>t81=int64#5
  2150. # asm 2: mov <mulrax=%rax,>t81=%r8
  2151. mov %rax,%r8
  2152. # qhasm: mulr11 = mulrdx
  2153. # asm 1: mov <mulrdx=int64#3,>mulr11=int64#6
  2154. # asm 2: mov <mulrdx=%rdx,>mulr11=%r9
  2155. mov %rdx,%r9
  2156. # qhasm: mulrax = t40_stack
  2157. # asm 1: movq <t40_stack=stack64#38,>mulrax=int64#7
  2158. # asm 2: movq <t40_stack=296(%rsp),>mulrax=%rax
  2159. movq 296(%rsp),%rax
  2160. # qhasm: (uint128) mulrdx mulrax = mulrax * t12_stack
  2161. # asm 1: mulq <t12_stack=stack64#10
  2162. # asm 2: mulq <t12_stack=72(%rsp)
  2163. mulq 72(%rsp)
  2164. # qhasm: t82 = mulrax
  2165. # asm 1: mov <mulrax=int64#7,>t82=int64#8
  2166. # asm 2: mov <mulrax=%rax,>t82=%r10
  2167. mov %rax,%r10
  2168. # qhasm: mulr21 = mulrdx
  2169. # asm 1: mov <mulrdx=int64#3,>mulr21=int64#9
  2170. # asm 2: mov <mulrdx=%rdx,>mulr21=%r11
  2171. mov %rdx,%r11
  2172. # qhasm: mulrax = t40_stack
  2173. # asm 1: movq <t40_stack=stack64#38,>mulrax=int64#7
  2174. # asm 2: movq <t40_stack=296(%rsp),>mulrax=%rax
  2175. movq 296(%rsp),%rax
  2176. # qhasm: (uint128) mulrdx mulrax = mulrax * t13_stack
  2177. # asm 1: mulq <t13_stack=stack64#11
  2178. # asm 2: mulq <t13_stack=80(%rsp)
  2179. mulq 80(%rsp)
  2180. # qhasm: t83 = mulrax
  2181. # asm 1: mov <mulrax=int64#7,>t83=int64#10
  2182. # asm 2: mov <mulrax=%rax,>t83=%r12
  2183. mov %rax,%r12
  2184. # qhasm: mulr31 = mulrdx
  2185. # asm 1: mov <mulrdx=int64#3,>mulr31=int64#11
  2186. # asm 2: mov <mulrdx=%rdx,>mulr31=%r13
  2187. mov %rdx,%r13
  2188. # qhasm: mulrax = t40_stack
  2189. # asm 1: movq <t40_stack=stack64#38,>mulrax=int64#7
  2190. # asm 2: movq <t40_stack=296(%rsp),>mulrax=%rax
  2191. movq 296(%rsp),%rax
  2192. # qhasm: (uint128) mulrdx mulrax = mulrax * t14_stack
  2193. # asm 1: mulq <t14_stack=stack64#12
  2194. # asm 2: mulq <t14_stack=88(%rsp)
  2195. mulq 88(%rsp)
  2196. # qhasm: t84 = mulrax
  2197. # asm 1: mov <mulrax=int64#7,>t84=int64#12
  2198. # asm 2: mov <mulrax=%rax,>t84=%r14
  2199. mov %rax,%r14
  2200. # qhasm: mulr41 = mulrdx
  2201. # asm 1: mov <mulrdx=int64#3,>mulr41=int64#13
  2202. # asm 2: mov <mulrdx=%rdx,>mulr41=%r15
  2203. mov %rdx,%r15
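# Note (added comment): all five column accumulators t80..t84 (with high
# words mulr01..mulr41) are now initialized from the t40 row. The remaining
# rows for t41..t44 are accumulated below; products that would land in
# limbs 5..8 are first multiplied by 19 so they fold into limbs 0..3.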
  2204. # qhasm: mulrax = t41_stack
  2205. # asm 1: movq <t41_stack=stack64#39,>mulrax=int64#7
  2206. # asm 2: movq <t41_stack=304(%rsp),>mulrax=%rax
  2207. movq 304(%rsp),%rax
  2208. # qhasm: (uint128) mulrdx mulrax = mulrax * t10_stack
  2209. # asm 1: mulq <t10_stack=stack64#8
  2210. # asm 2: mulq <t10_stack=56(%rsp)
  2211. mulq 56(%rsp)
  2212. # qhasm: carry? t81 += mulrax
  2213. # asm 1: add <mulrax=int64#7,<t81=int64#5
  2214. # asm 2: add <mulrax=%rax,<t81=%r8
  2215. add %rax,%r8
  2216. # qhasm: mulr11 += mulrdx + carry
  2217. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  2218. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  2219. adc %rdx,%r9
  2220. # qhasm: mulrax = t41_stack
  2221. # asm 1: movq <t41_stack=stack64#39,>mulrax=int64#7
  2222. # asm 2: movq <t41_stack=304(%rsp),>mulrax=%rax
  2223. movq 304(%rsp),%rax
  2224. # qhasm: (uint128) mulrdx mulrax = mulrax * t11_stack
  2225. # asm 1: mulq <t11_stack=stack64#9
  2226. # asm 2: mulq <t11_stack=64(%rsp)
  2227. mulq 64(%rsp)
  2228. # qhasm: carry? t82 += mulrax
  2229. # asm 1: add <mulrax=int64#7,<t82=int64#8
  2230. # asm 2: add <mulrax=%rax,<t82=%r10
  2231. add %rax,%r10
  2232. # qhasm: mulr21 += mulrdx + carry
  2233. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  2234. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  2235. adc %rdx,%r11
  2236. # qhasm: mulrax = t41_stack
  2237. # asm 1: movq <t41_stack=stack64#39,>mulrax=int64#7
  2238. # asm 2: movq <t41_stack=304(%rsp),>mulrax=%rax
  2239. movq 304(%rsp),%rax
  2240. # qhasm: (uint128) mulrdx mulrax = mulrax * t12_stack
  2241. # asm 1: mulq <t12_stack=stack64#10
  2242. # asm 2: mulq <t12_stack=72(%rsp)
  2243. mulq 72(%rsp)
  2244. # qhasm: carry? t83 += mulrax
  2245. # asm 1: add <mulrax=int64#7,<t83=int64#10
  2246. # asm 2: add <mulrax=%rax,<t83=%r12
  2247. add %rax,%r12
  2248. # qhasm: mulr31 += mulrdx + carry
  2249. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  2250. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  2251. adc %rdx,%r13
  2252. # qhasm: mulrax = t41_stack
  2253. # asm 1: movq <t41_stack=stack64#39,>mulrax=int64#7
  2254. # asm 2: movq <t41_stack=304(%rsp),>mulrax=%rax
  2255. movq 304(%rsp),%rax
  2256. # qhasm: (uint128) mulrdx mulrax = mulrax * t13_stack
  2257. # asm 1: mulq <t13_stack=stack64#11
  2258. # asm 2: mulq <t13_stack=80(%rsp)
  2259. mulq 80(%rsp)
  2260. # qhasm: carry? t84 += mulrax
  2261. # asm 1: add <mulrax=int64#7,<t84=int64#12
  2262. # asm 2: add <mulrax=%rax,<t84=%r14
  2263. add %rax,%r14
  2264. # qhasm: mulr41 += mulrdx + carry
  2265. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  2266. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  2267. adc %rdx,%r15
  2268. # qhasm: mulrax = t41_stack
  2269. # asm 1: movq <t41_stack=stack64#39,>mulrax=int64#3
  2270. # asm 2: movq <t41_stack=304(%rsp),>mulrax=%rdx
  2271. movq 304(%rsp),%rdx
  2272. # qhasm: mulrax *= 19
  2273. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  2274. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  2275. imulq $19,%rdx,%rax
  2276. # qhasm: (uint128) mulrdx mulrax = mulrax * t14_stack
  2277. # asm 1: mulq <t14_stack=stack64#12
  2278. # asm 2: mulq <t14_stack=88(%rsp)
  2279. mulq 88(%rsp)
  2280. # qhasm: carry? t80 += mulrax
  2281. # asm 1: add <mulrax=int64#7,<t80=int64#2
  2282. # asm 2: add <mulrax=%rax,<t80=%rsi
  2283. add %rax,%rsi
  2284. # qhasm: mulr01 += mulrdx + carry
  2285. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  2286. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  2287. adc %rdx,%rcx
  2288. # qhasm: mulrax = t42_stack
  2289. # asm 1: movq <t42_stack=stack64#40,>mulrax=int64#7
  2290. # asm 2: movq <t42_stack=312(%rsp),>mulrax=%rax
  2291. movq 312(%rsp),%rax
  2292. # qhasm: (uint128) mulrdx mulrax = mulrax * t10_stack
  2293. # asm 1: mulq <t10_stack=stack64#8
  2294. # asm 2: mulq <t10_stack=56(%rsp)
  2295. mulq 56(%rsp)
  2296. # qhasm: carry? t82 += mulrax
  2297. # asm 1: add <mulrax=int64#7,<t82=int64#8
  2298. # asm 2: add <mulrax=%rax,<t82=%r10
  2299. add %rax,%r10
  2300. # qhasm: mulr21 += mulrdx + carry
  2301. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  2302. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  2303. adc %rdx,%r11
  2304. # qhasm: mulrax = t42_stack
  2305. # asm 1: movq <t42_stack=stack64#40,>mulrax=int64#7
  2306. # asm 2: movq <t42_stack=312(%rsp),>mulrax=%rax
  2307. movq 312(%rsp),%rax
  2308. # qhasm: (uint128) mulrdx mulrax = mulrax * t11_stack
  2309. # asm 1: mulq <t11_stack=stack64#9
  2310. # asm 2: mulq <t11_stack=64(%rsp)
  2311. mulq 64(%rsp)
  2312. # qhasm: carry? t83 += mulrax
  2313. # asm 1: add <mulrax=int64#7,<t83=int64#10
  2314. # asm 2: add <mulrax=%rax,<t83=%r12
  2315. add %rax,%r12
  2316. # qhasm: mulr31 += mulrdx + carry
  2317. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  2318. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  2319. adc %rdx,%r13
  2320. # qhasm: mulrax = t42_stack
  2321. # asm 1: movq <t42_stack=stack64#40,>mulrax=int64#7
  2322. # asm 2: movq <t42_stack=312(%rsp),>mulrax=%rax
  2323. movq 312(%rsp),%rax
  2324. # qhasm: (uint128) mulrdx mulrax = mulrax * t12_stack
  2325. # asm 1: mulq <t12_stack=stack64#10
  2326. # asm 2: mulq <t12_stack=72(%rsp)
  2327. mulq 72(%rsp)
  2328. # qhasm: carry? t84 += mulrax
  2329. # asm 1: add <mulrax=int64#7,<t84=int64#12
  2330. # asm 2: add <mulrax=%rax,<t84=%r14
  2331. add %rax,%r14
  2332. # qhasm: mulr41 += mulrdx + carry
  2333. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  2334. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  2335. adc %rdx,%r15
  2336. # qhasm: mulrax = t42_stack
  2337. # asm 1: movq <t42_stack=stack64#40,>mulrax=int64#3
  2338. # asm 2: movq <t42_stack=312(%rsp),>mulrax=%rdx
  2339. movq 312(%rsp),%rdx
  2340. # qhasm: mulrax *= 19
  2341. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  2342. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  2343. imulq $19,%rdx,%rax
  2344. # qhasm: (uint128) mulrdx mulrax = mulrax * t13_stack
  2345. # asm 1: mulq <t13_stack=stack64#11
  2346. # asm 2: mulq <t13_stack=80(%rsp)
  2347. mulq 80(%rsp)
  2348. # qhasm: carry? t80 += mulrax
  2349. # asm 1: add <mulrax=int64#7,<t80=int64#2
  2350. # asm 2: add <mulrax=%rax,<t80=%rsi
  2351. add %rax,%rsi
  2352. # qhasm: mulr01 += mulrdx + carry
  2353. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  2354. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  2355. adc %rdx,%rcx
  2356. # qhasm: mulrax = t42_stack
  2357. # asm 1: movq <t42_stack=stack64#40,>mulrax=int64#3
  2358. # asm 2: movq <t42_stack=312(%rsp),>mulrax=%rdx
  2359. movq 312(%rsp),%rdx
  2360. # qhasm: mulrax *= 19
  2361. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  2362. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  2363. imulq $19,%rdx,%rax
  2364. # qhasm: (uint128) mulrdx mulrax = mulrax * t14_stack
  2365. # asm 1: mulq <t14_stack=stack64#12
  2366. # asm 2: mulq <t14_stack=88(%rsp)
  2367. mulq 88(%rsp)
  2368. # qhasm: carry? t81 += mulrax
  2369. # asm 1: add <mulrax=int64#7,<t81=int64#5
  2370. # asm 2: add <mulrax=%rax,<t81=%r8
  2371. add %rax,%r8
  2372. # qhasm: mulr11 += mulrdx + carry
  2373. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  2374. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  2375. adc %rdx,%r9
  2376. # qhasm: mulrax = t43_stack
  2377. # asm 1: movq <t43_stack=stack64#41,>mulrax=int64#7
  2378. # asm 2: movq <t43_stack=320(%rsp),>mulrax=%rax
  2379. movq 320(%rsp),%rax
  2380. # qhasm: (uint128) mulrdx mulrax = mulrax * t10_stack
  2381. # asm 1: mulq <t10_stack=stack64#8
  2382. # asm 2: mulq <t10_stack=56(%rsp)
  2383. mulq 56(%rsp)
  2384. # qhasm: carry? t83 += mulrax
  2385. # asm 1: add <mulrax=int64#7,<t83=int64#10
  2386. # asm 2: add <mulrax=%rax,<t83=%r12
  2387. add %rax,%r12
  2388. # qhasm: mulr31 += mulrdx + carry
  2389. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  2390. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  2391. adc %rdx,%r13
  2392. # qhasm: mulrax = t43_stack
  2393. # asm 1: movq <t43_stack=stack64#41,>mulrax=int64#7
  2394. # asm 2: movq <t43_stack=320(%rsp),>mulrax=%rax
  2395. movq 320(%rsp),%rax
  2396. # qhasm: (uint128) mulrdx mulrax = mulrax * t11_stack
  2397. # asm 1: mulq <t11_stack=stack64#9
  2398. # asm 2: mulq <t11_stack=64(%rsp)
  2399. mulq 64(%rsp)
  2400. # qhasm: carry? t84 += mulrax
  2401. # asm 1: add <mulrax=int64#7,<t84=int64#12
  2402. # asm 2: add <mulrax=%rax,<t84=%r14
  2403. add %rax,%r14
  2404. # qhasm: mulr41 += mulrdx + carry
  2405. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  2406. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  2407. adc %rdx,%r15
  2408. # qhasm: mulrax = mulx319_stack
  2409. # asm 1: movq <mulx319_stack=stack64#33,>mulrax=int64#7
  2410. # asm 2: movq <mulx319_stack=256(%rsp),>mulrax=%rax
  2411. movq 256(%rsp),%rax
  2412. # qhasm: (uint128) mulrdx mulrax = mulrax * t13_stack
  2413. # asm 1: mulq <t13_stack=stack64#11
  2414. # asm 2: mulq <t13_stack=80(%rsp)
  2415. mulq 80(%rsp)
  2416. # qhasm: carry? t81 += mulrax
  2417. # asm 1: add <mulrax=int64#7,<t81=int64#5
  2418. # asm 2: add <mulrax=%rax,<t81=%r8
  2419. add %rax,%r8
  2420. # qhasm: mulr11 += mulrdx + carry
  2421. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  2422. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  2423. adc %rdx,%r9
  2424. # qhasm: mulrax = mulx319_stack
  2425. # asm 1: movq <mulx319_stack=stack64#33,>mulrax=int64#7
  2426. # asm 2: movq <mulx319_stack=256(%rsp),>mulrax=%rax
  2427. movq 256(%rsp),%rax
  2428. # qhasm: (uint128) mulrdx mulrax = mulrax * t14_stack
  2429. # asm 1: mulq <t14_stack=stack64#12
  2430. # asm 2: mulq <t14_stack=88(%rsp)
  2431. mulq 88(%rsp)
  2432. # qhasm: carry? t82 += mulrax
  2433. # asm 1: add <mulrax=int64#7,<t82=int64#8
  2434. # asm 2: add <mulrax=%rax,<t82=%r10
  2435. add %rax,%r10
  2436. # qhasm: mulr21 += mulrdx + carry
  2437. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  2438. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  2439. adc %rdx,%r11
  2440. # qhasm: mulrax = t44_stack
  2441. # asm 1: movq <t44_stack=stack64#42,>mulrax=int64#7
  2442. # asm 2: movq <t44_stack=328(%rsp),>mulrax=%rax
  2443. movq 328(%rsp),%rax
  2444. # qhasm: (uint128) mulrdx mulrax = mulrax * t10_stack
  2445. # asm 1: mulq <t10_stack=stack64#8
  2446. # asm 2: mulq <t10_stack=56(%rsp)
  2447. mulq 56(%rsp)
  2448. # qhasm: carry? t84 += mulrax
  2449. # asm 1: add <mulrax=int64#7,<t84=int64#12
  2450. # asm 2: add <mulrax=%rax,<t84=%r14
  2451. add %rax,%r14
  2452. # qhasm: mulr41 += mulrdx + carry
  2453. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  2454. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  2455. adc %rdx,%r15
  2456. # qhasm: mulrax = mulx419_stack
  2457. # asm 1: movq <mulx419_stack=stack64#34,>mulrax=int64#7
  2458. # asm 2: movq <mulx419_stack=264(%rsp),>mulrax=%rax
  2459. movq 264(%rsp),%rax
  2460. # qhasm: (uint128) mulrdx mulrax = mulrax * t12_stack
  2461. # asm 1: mulq <t12_stack=stack64#10
  2462. # asm 2: mulq <t12_stack=72(%rsp)
  2463. mulq 72(%rsp)
  2464. # qhasm: carry? t81 += mulrax
  2465. # asm 1: add <mulrax=int64#7,<t81=int64#5
  2466. # asm 2: add <mulrax=%rax,<t81=%r8
  2467. add %rax,%r8
  2468. # qhasm: mulr11 += mulrdx + carry
  2469. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  2470. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  2471. adc %rdx,%r9
  2472. # qhasm: mulrax = mulx419_stack
  2473. # asm 1: movq <mulx419_stack=stack64#34,>mulrax=int64#7
  2474. # asm 2: movq <mulx419_stack=264(%rsp),>mulrax=%rax
  2475. movq 264(%rsp),%rax
  2476. # qhasm: (uint128) mulrdx mulrax = mulrax * t13_stack
  2477. # asm 1: mulq <t13_stack=stack64#11
  2478. # asm 2: mulq <t13_stack=80(%rsp)
  2479. mulq 80(%rsp)
  2480. # qhasm: carry? t82 += mulrax
  2481. # asm 1: add <mulrax=int64#7,<t82=int64#8
  2482. # asm 2: add <mulrax=%rax,<t82=%r10
  2483. add %rax,%r10
  2484. # qhasm: mulr21 += mulrdx + carry
  2485. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  2486. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  2487. adc %rdx,%r11
  2488. # qhasm: mulrax = mulx419_stack
  2489. # asm 1: movq <mulx419_stack=stack64#34,>mulrax=int64#7
  2490. # asm 2: movq <mulx419_stack=264(%rsp),>mulrax=%rax
  2491. movq 264(%rsp),%rax
  2492. # qhasm: (uint128) mulrdx mulrax = mulrax * t14_stack
  2493. # asm 1: mulq <t14_stack=stack64#12
  2494. # asm 2: mulq <t14_stack=88(%rsp)
  2495. mulq 88(%rsp)
  2496. # qhasm: carry? t83 += mulrax
  2497. # asm 1: add <mulrax=int64#7,<t83=int64#10
  2498. # asm 2: add <mulrax=%rax,<t83=%r12
  2499. add %rax,%r12
  2500. # qhasm: mulr31 += mulrdx + carry
  2501. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  2502. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  2503. adc %rdx,%r13
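# Note (added comment): all 25 partial products have been accumulated. The
# reduction below loads REDMASK51 = 2^51 - 1, then for each column uses
# shld $13 to combine the high word with the top 13 bits of the low word
# into a carry above bit 51, masks the low word to 51 bits, and adds each
# carry into the next column; the carry out of the top column is folded
# back into t80 with a factor of 19.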
  2504. # qhasm: mulredmask = *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_REDMASK51
  2505. # asm 1: movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,>mulredmask=int64#3
  2506. # asm 2: movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,>mulredmask=%rdx
  2507. movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,%rdx
  2508. # qhasm: mulr01 = (mulr01.t80) << 13
  2509. # asm 1: shld $13,<t80=int64#2,<mulr01=int64#4
  2510. # asm 2: shld $13,<t80=%rsi,<mulr01=%rcx
  2511. shld $13,%rsi,%rcx
  2512. # qhasm: t80 &= mulredmask
  2513. # asm 1: and <mulredmask=int64#3,<t80=int64#2
  2514. # asm 2: and <mulredmask=%rdx,<t80=%rsi
  2515. and %rdx,%rsi
  2516. # qhasm: mulr11 = (mulr11.t81) << 13
  2517. # asm 1: shld $13,<t81=int64#5,<mulr11=int64#6
  2518. # asm 2: shld $13,<t81=%r8,<mulr11=%r9
  2519. shld $13,%r8,%r9
  2520. # qhasm: t81 &= mulredmask
  2521. # asm 1: and <mulredmask=int64#3,<t81=int64#5
  2522. # asm 2: and <mulredmask=%rdx,<t81=%r8
  2523. and %rdx,%r8
  2524. # qhasm: t81 += mulr01
  2525. # asm 1: add <mulr01=int64#4,<t81=int64#5
  2526. # asm 2: add <mulr01=%rcx,<t81=%r8
  2527. add %rcx,%r8
  2528. # qhasm: mulr21 = (mulr21.t82) << 13
  2529. # asm 1: shld $13,<t82=int64#8,<mulr21=int64#9
  2530. # asm 2: shld $13,<t82=%r10,<mulr21=%r11
  2531. shld $13,%r10,%r11
  2532. # qhasm: t82 &= mulredmask
  2533. # asm 1: and <mulredmask=int64#3,<t82=int64#8
  2534. # asm 2: and <mulredmask=%rdx,<t82=%r10
  2535. and %rdx,%r10
  2536. # qhasm: t82 += mulr11
  2537. # asm 1: add <mulr11=int64#6,<t82=int64#8
  2538. # asm 2: add <mulr11=%r9,<t82=%r10
  2539. add %r9,%r10
  2540. # qhasm: mulr31 = (mulr31.t83) << 13
  2541. # asm 1: shld $13,<t83=int64#10,<mulr31=int64#11
  2542. # asm 2: shld $13,<t83=%r12,<mulr31=%r13
  2543. shld $13,%r12,%r13
  2544. # qhasm: t83 &= mulredmask
  2545. # asm 1: and <mulredmask=int64#3,<t83=int64#10
  2546. # asm 2: and <mulredmask=%rdx,<t83=%r12
  2547. and %rdx,%r12
  2548. # qhasm: t83 += mulr21
  2549. # asm 1: add <mulr21=int64#9,<t83=int64#10
  2550. # asm 2: add <mulr21=%r11,<t83=%r12
  2551. add %r11,%r12
  2552. # qhasm: mulr41 = (mulr41.t84) << 13
  2553. # asm 1: shld $13,<t84=int64#12,<mulr41=int64#13
  2554. # asm 2: shld $13,<t84=%r14,<mulr41=%r15
  2555. shld $13,%r14,%r15
  2556. # qhasm: t84 &= mulredmask
  2557. # asm 1: and <mulredmask=int64#3,<t84=int64#12
  2558. # asm 2: and <mulredmask=%rdx,<t84=%r14
  2559. and %rdx,%r14
  2560. # qhasm: t84 += mulr31
  2561. # asm 1: add <mulr31=int64#11,<t84=int64#12
  2562. # asm 2: add <mulr31=%r13,<t84=%r14
  2563. add %r13,%r14
  2564. # qhasm: mulr41 = mulr41 * 19
  2565. # asm 1: imulq $19,<mulr41=int64#13,>mulr41=int64#4
  2566. # asm 2: imulq $19,<mulr41=%r15,>mulr41=%rcx
  2567. imulq $19,%r15,%rcx
  2568. # qhasm: t80 += mulr41
  2569. # asm 1: add <mulr41=int64#4,<t80=int64#2
  2570. # asm 2: add <mulr41=%rcx,<t80=%rsi
  2571. add %rcx,%rsi
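# Note (added comment): the limbs t80..t84 can still exceed 51 bits, so a
# final carry ripple runs through them; the carry out of t84 is multiplied
# by 19, added back into t80, and each limb is masked with mulredmask.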
  2572. # qhasm: mult = t80
  2573. # asm 1: mov <t80=int64#2,>mult=int64#4
  2574. # asm 2: mov <t80=%rsi,>mult=%rcx
  2575. mov %rsi,%rcx
  2576. # qhasm: (uint64) mult >>= 51
  2577. # asm 1: shr $51,<mult=int64#4
  2578. # asm 2: shr $51,<mult=%rcx
  2579. shr $51,%rcx
  2580. # qhasm: mult += t81
  2581. # asm 1: add <t81=int64#5,<mult=int64#4
  2582. # asm 2: add <t81=%r8,<mult=%rcx
  2583. add %r8,%rcx
  2584. # qhasm: t81 = mult
  2585. # asm 1: mov <mult=int64#4,>t81=int64#5
  2586. # asm 2: mov <mult=%rcx,>t81=%r8
  2587. mov %rcx,%r8
  2588. # qhasm: (uint64) mult >>= 51
  2589. # asm 1: shr $51,<mult=int64#4
  2590. # asm 2: shr $51,<mult=%rcx
  2591. shr $51,%rcx
  2592. # qhasm: t80 &= mulredmask
  2593. # asm 1: and <mulredmask=int64#3,<t80=int64#2
  2594. # asm 2: and <mulredmask=%rdx,<t80=%rsi
  2595. and %rdx,%rsi
  2596. # qhasm: mult += t82
  2597. # asm 1: add <t82=int64#8,<mult=int64#4
  2598. # asm 2: add <t82=%r10,<mult=%rcx
  2599. add %r10,%rcx
  2600. # qhasm: t82 = mult
  2601. # asm 1: mov <mult=int64#4,>t82=int64#6
  2602. # asm 2: mov <mult=%rcx,>t82=%r9
  2603. mov %rcx,%r9
  2604. # qhasm: (uint64) mult >>= 51
  2605. # asm 1: shr $51,<mult=int64#4
  2606. # asm 2: shr $51,<mult=%rcx
  2607. shr $51,%rcx
  2608. # qhasm: t81 &= mulredmask
  2609. # asm 1: and <mulredmask=int64#3,<t81=int64#5
  2610. # asm 2: and <mulredmask=%rdx,<t81=%r8
  2611. and %rdx,%r8
  2612. # qhasm: mult += t83
  2613. # asm 1: add <t83=int64#10,<mult=int64#4
  2614. # asm 2: add <t83=%r12,<mult=%rcx
  2615. add %r12,%rcx
  2616. # qhasm: t83 = mult
  2617. # asm 1: mov <mult=int64#4,>t83=int64#7
  2618. # asm 2: mov <mult=%rcx,>t83=%rax
  2619. mov %rcx,%rax
  2620. # qhasm: (uint64) mult >>= 51
  2621. # asm 1: shr $51,<mult=int64#4
  2622. # asm 2: shr $51,<mult=%rcx
  2623. shr $51,%rcx
  2624. # qhasm: t82 &= mulredmask
  2625. # asm 1: and <mulredmask=int64#3,<t82=int64#6
  2626. # asm 2: and <mulredmask=%rdx,<t82=%r9
  2627. and %rdx,%r9
  2628. # qhasm: mult += t84
  2629. # asm 1: add <t84=int64#12,<mult=int64#4
  2630. # asm 2: add <t84=%r14,<mult=%rcx
  2631. add %r14,%rcx
  2632. # qhasm: t84 = mult
  2633. # asm 1: mov <mult=int64#4,>t84=int64#8
  2634. # asm 2: mov <mult=%rcx,>t84=%r10
  2635. mov %rcx,%r10
  2636. # qhasm: (uint64) mult >>= 51
  2637. # asm 1: shr $51,<mult=int64#4
  2638. # asm 2: shr $51,<mult=%rcx
  2639. shr $51,%rcx
  2640. # qhasm: t83 &= mulredmask
  2641. # asm 1: and <mulredmask=int64#3,<t83=int64#7
  2642. # asm 2: and <mulredmask=%rdx,<t83=%rax
  2643. and %rdx,%rax
  2644. # qhasm: mult *= 19
  2645. # asm 1: imulq $19,<mult=int64#4,>mult=int64#4
  2646. # asm 2: imulq $19,<mult=%rcx,>mult=%rcx
  2647. imulq $19,%rcx,%rcx
  2648. # qhasm: t80 += mult
  2649. # asm 1: add <mult=int64#4,<t80=int64#2
  2650. # asm 2: add <mult=%rcx,<t80=%rsi
  2651. add %rcx,%rsi
  2652. # qhasm: t84 &= mulredmask
  2653. # asm 1: and <mulredmask=int64#3,<t84=int64#8
  2654. # asm 2: and <mulredmask=%rdx,<t84=%r10
  2655. and %rdx,%r10
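# Note (added comment): t80..t84 now hold the reduced product. They are
# copied into zq0..zq4 below, and the constants 2P0/2P1234 (the limbs of
# 2p = 2*(2^255 - 19) in radix-2^51 form) are added so that the limbwise
# subtraction of the t9 limbs further down cannot underflow.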
  2656. # qhasm: zq0 = t80
  2657. # asm 1: mov <t80=int64#2,>zq0=int64#3
  2658. # asm 2: mov <t80=%rsi,>zq0=%rdx
  2659. mov %rsi,%rdx
  2660. # qhasm: zq1 = t81
  2661. # asm 1: mov <t81=int64#5,>zq1=int64#4
  2662. # asm 2: mov <t81=%r8,>zq1=%rcx
  2663. mov %r8,%rcx
  2664. # qhasm: zq2 = t82
  2665. # asm 1: mov <t82=int64#6,>zq2=int64#9
  2666. # asm 2: mov <t82=%r9,>zq2=%r11
  2667. mov %r9,%r11
  2668. # qhasm: zq3 = t83
  2669. # asm 1: mov <t83=int64#7,>zq3=int64#10
  2670. # asm 2: mov <t83=%rax,>zq3=%r12
  2671. mov %rax,%r12
  2672. # qhasm: zq4 = t84
  2673. # asm 1: mov <t84=int64#8,>zq4=int64#11
  2674. # asm 2: mov <t84=%r10,>zq4=%r13
  2675. mov %r10,%r13
  2676. # qhasm: zq0 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P0
  2677. # asm 1: add crypto_scalarmult_curve25519_amd64_51_2P0,<zq0=int64#3
  2678. # asm 2: add crypto_scalarmult_curve25519_amd64_51_2P0,<zq0=%rdx
  2679. add crypto_scalarmult_curve25519_amd64_51_2P0,%rdx
  2680. # qhasm: zq1 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P1234
  2681. # asm 1: add crypto_scalarmult_curve25519_amd64_51_2P1234,<zq1=int64#4
  2682. # asm 2: add crypto_scalarmult_curve25519_amd64_51_2P1234,<zq1=%rcx
  2683. add crypto_scalarmult_curve25519_amd64_51_2P1234,%rcx
  2684. # qhasm: zq2 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P1234
  2685. # asm 1: add crypto_scalarmult_curve25519_amd64_51_2P1234,<zq2=int64#9
  2686. # asm 2: add crypto_scalarmult_curve25519_amd64_51_2P1234,<zq2=%r11
  2687. add crypto_scalarmult_curve25519_amd64_51_2P1234,%r11
  2688. # qhasm: zq3 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P1234
  2689. # asm 1: add crypto_scalarmult_curve25519_amd64_51_2P1234,<zq3=int64#10
  2690. # asm 2: add crypto_scalarmult_curve25519_amd64_51_2P1234,<zq3=%r12
  2691. add crypto_scalarmult_curve25519_amd64_51_2P1234,%r12
  2692. # qhasm: zq4 += *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_2P1234
  2693. # asm 1: add crypto_scalarmult_curve25519_amd64_51_2P1234,<zq4=int64#11
  2694. # asm 2: add crypto_scalarmult_curve25519_amd64_51_2P1234,<zq4=%r13
  2695. add crypto_scalarmult_curve25519_amd64_51_2P1234,%r13
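# Note (added comment): next, the t9 limbs saved on the stack are added
# into t80..t84 (the sum) and subtracted from zq0..zq4 (the difference,
# kept nonnegative by the 2p added above); both results are then stored
# into the work array.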
  2696. # qhasm: t80 += t90_stack
  2697. # asm 1: addq <t90_stack=stack64#13,<t80=int64#2
  2698. # asm 2: addq <t90_stack=96(%rsp),<t80=%rsi
  2699. addq 96(%rsp),%rsi
  2700. # qhasm: t81 += t91_stack
  2701. # asm 1: addq <t91_stack=stack64#14,<t81=int64#5
  2702. # asm 2: addq <t91_stack=104(%rsp),<t81=%r8
  2703. addq 104(%rsp),%r8
  2704. # qhasm: t82 += t92_stack
  2705. # asm 1: addq <t92_stack=stack64#15,<t82=int64#6
  2706. # asm 2: addq <t92_stack=112(%rsp),<t82=%r9
  2707. addq 112(%rsp),%r9
  2708. # qhasm: t83 += t93_stack
  2709. # asm 1: addq <t93_stack=stack64#16,<t83=int64#7
  2710. # asm 2: addq <t93_stack=120(%rsp),<t83=%rax
  2711. addq 120(%rsp),%rax
  2712. # qhasm: t84 += t94_stack
  2713. # asm 1: addq <t94_stack=stack64#17,<t84=int64#8
  2714. # asm 2: addq <t94_stack=128(%rsp),<t84=%r10
  2715. addq 128(%rsp),%r10
  2716. # qhasm: zq0 -= t90_stack
  2717. # asm 1: subq <t90_stack=stack64#13,<zq0=int64#3
  2718. # asm 2: subq <t90_stack=96(%rsp),<zq0=%rdx
  2719. subq 96(%rsp),%rdx
  2720. # qhasm: zq1 -= t91_stack
  2721. # asm 1: subq <t91_stack=stack64#14,<zq1=int64#4
  2722. # asm 2: subq <t91_stack=104(%rsp),<zq1=%rcx
  2723. subq 104(%rsp),%rcx
  2724. # qhasm: zq2 -= t92_stack
  2725. # asm 1: subq <t92_stack=stack64#15,<zq2=int64#9
  2726. # asm 2: subq <t92_stack=112(%rsp),<zq2=%r11
  2727. subq 112(%rsp),%r11
  2728. # qhasm: zq3 -= t93_stack
  2729. # asm 1: subq <t93_stack=stack64#16,<zq3=int64#10
  2730. # asm 2: subq <t93_stack=120(%rsp),<zq3=%r12
  2731. subq 120(%rsp),%r12
  2732. # qhasm: zq4 -= t94_stack
  2733. # asm 1: subq <t94_stack=stack64#17,<zq4=int64#11
  2734. # asm 2: subq <t94_stack=128(%rsp),<zq4=%r13
  2735. subq 128(%rsp),%r13
  2736. # qhasm: *(uint64 *)(workp + 120) = t80
  2737. # asm 1: movq <t80=int64#2,120(<workp=int64#1)
  2738. # asm 2: movq <t80=%rsi,120(<workp=%rdi)
  2739. movq %rsi,120(%rdi)
  2740. # qhasm: *(uint64 *)(workp + 128) = t81
  2741. # asm 1: movq <t81=int64#5,128(<workp=int64#1)
  2742. # asm 2: movq <t81=%r8,128(<workp=%rdi)
  2743. movq %r8,128(%rdi)
  2744. # qhasm: *(uint64 *)(workp + 136) = t82
  2745. # asm 1: movq <t82=int64#6,136(<workp=int64#1)
  2746. # asm 2: movq <t82=%r9,136(<workp=%rdi)
  2747. movq %r9,136(%rdi)
  2748. # qhasm: *(uint64 *)(workp + 144) = t83
  2749. # asm 1: movq <t83=int64#7,144(<workp=int64#1)
  2750. # asm 2: movq <t83=%rax,144(<workp=%rdi)
  2751. movq %rax,144(%rdi)
  2752. # qhasm: *(uint64 *)(workp + 152) = t84
  2753. # asm 1: movq <t84=int64#8,152(<workp=int64#1)
  2754. # asm 2: movq <t84=%r10,152(<workp=%rdi)
  2755. movq %r10,152(%rdi)
  2756. # qhasm: *(uint64 *)(workp + 160) = zq0
  2757. # asm 1: movq <zq0=int64#3,160(<workp=int64#1)
  2758. # asm 2: movq <zq0=%rdx,160(<workp=%rdi)
  2759. movq %rdx,160(%rdi)
  2760. # qhasm: *(uint64 *)(workp + 168) = zq1
  2761. # asm 1: movq <zq1=int64#4,168(<workp=int64#1)
  2762. # asm 2: movq <zq1=%rcx,168(<workp=%rdi)
  2763. movq %rcx,168(%rdi)
  2764. # qhasm: *(uint64 *)(workp + 176) = zq2
  2765. # asm 1: movq <zq2=int64#9,176(<workp=int64#1)
  2766. # asm 2: movq <zq2=%r11,176(<workp=%rdi)
  2767. movq %r11,176(%rdi)
  2768. # qhasm: *(uint64 *)(workp + 184) = zq3
  2769. # asm 1: movq <zq3=int64#10,184(<workp=int64#1)
  2770. # asm 2: movq <zq3=%r12,184(<workp=%rdi)
  2771. movq %r12,184(%rdi)
  2772. # qhasm: *(uint64 *)(workp + 192) = zq4
  2773. # asm 1: movq <zq4=int64#11,192(<workp=int64#1)
  2774. # asm 2: movq <zq4=%r13,192(<workp=%rdi)
  2775. movq %r13,192(%rdi)
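# Note (added comment): the sum limbs were written to workp+120..152 and
# the difference limbs to workp+160..192. The block below squares the
# element at workp+120..152 in place, again in radix 2^51: off-diagonal
# products are doubled (shl $1), and terms that wrap past limb 4 carry an
# extra factor 19 (38 = 2*19 for the doubled ones).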
  2776. # qhasm: squarerax = *(uint64 *)(workp + 120)
  2777. # asm 1: movq 120(<workp=int64#1),>squarerax=int64#7
  2778. # asm 2: movq 120(<workp=%rdi),>squarerax=%rax
  2779. movq 120(%rdi),%rax
  2780. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 120)
  2781. # asm 1: mulq 120(<workp=int64#1)
  2782. # asm 2: mulq 120(<workp=%rdi)
  2783. mulq 120(%rdi)
  2784. # qhasm: xq0 = squarerax
  2785. # asm 1: mov <squarerax=int64#7,>xq0=int64#2
  2786. # asm 2: mov <squarerax=%rax,>xq0=%rsi
  2787. mov %rax,%rsi
  2788. # qhasm: squarer01 = squarerdx
  2789. # asm 1: mov <squarerdx=int64#3,>squarer01=int64#4
  2790. # asm 2: mov <squarerdx=%rdx,>squarer01=%rcx
  2791. mov %rdx,%rcx
  2792. # qhasm: squarerax = *(uint64 *)(workp + 120)
  2793. # asm 1: movq 120(<workp=int64#1),>squarerax=int64#7
  2794. # asm 2: movq 120(<workp=%rdi),>squarerax=%rax
  2795. movq 120(%rdi),%rax
  2796. # qhasm: squarerax <<= 1
  2797. # asm 1: shl $1,<squarerax=int64#7
  2798. # asm 2: shl $1,<squarerax=%rax
  2799. shl $1,%rax
  2800. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 128)
  2801. # asm 1: mulq 128(<workp=int64#1)
  2802. # asm 2: mulq 128(<workp=%rdi)
  2803. mulq 128(%rdi)
  2804. # qhasm: xq1 = squarerax
  2805. # asm 1: mov <squarerax=int64#7,>xq1=int64#5
  2806. # asm 2: mov <squarerax=%rax,>xq1=%r8
  2807. mov %rax,%r8
  2808. # qhasm: squarer11 = squarerdx
  2809. # asm 1: mov <squarerdx=int64#3,>squarer11=int64#6
  2810. # asm 2: mov <squarerdx=%rdx,>squarer11=%r9
  2811. mov %rdx,%r9
  2812. # qhasm: squarerax = *(uint64 *)(workp + 120)
  2813. # asm 1: movq 120(<workp=int64#1),>squarerax=int64#7
  2814. # asm 2: movq 120(<workp=%rdi),>squarerax=%rax
  2815. movq 120(%rdi),%rax
  2816. # qhasm: squarerax <<= 1
  2817. # asm 1: shl $1,<squarerax=int64#7
  2818. # asm 2: shl $1,<squarerax=%rax
  2819. shl $1,%rax
  2820. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 136)
  2821. # asm 1: mulq 136(<workp=int64#1)
  2822. # asm 2: mulq 136(<workp=%rdi)
  2823. mulq 136(%rdi)
  2824. # qhasm: xq2 = squarerax
  2825. # asm 1: mov <squarerax=int64#7,>xq2=int64#8
  2826. # asm 2: mov <squarerax=%rax,>xq2=%r10
  2827. mov %rax,%r10
  2828. # qhasm: squarer21 = squarerdx
  2829. # asm 1: mov <squarerdx=int64#3,>squarer21=int64#9
  2830. # asm 2: mov <squarerdx=%rdx,>squarer21=%r11
  2831. mov %rdx,%r11
  2832. # qhasm: squarerax = *(uint64 *)(workp + 120)
  2833. # asm 1: movq 120(<workp=int64#1),>squarerax=int64#7
  2834. # asm 2: movq 120(<workp=%rdi),>squarerax=%rax
  2835. movq 120(%rdi),%rax
  2836. # qhasm: squarerax <<= 1
  2837. # asm 1: shl $1,<squarerax=int64#7
  2838. # asm 2: shl $1,<squarerax=%rax
  2839. shl $1,%rax
  2840. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 144)
  2841. # asm 1: mulq 144(<workp=int64#1)
  2842. # asm 2: mulq 144(<workp=%rdi)
  2843. mulq 144(%rdi)
  2844. # qhasm: xq3 = squarerax
  2845. # asm 1: mov <squarerax=int64#7,>xq3=int64#10
  2846. # asm 2: mov <squarerax=%rax,>xq3=%r12
  2847. mov %rax,%r12
  2848. # qhasm: squarer31 = squarerdx
  2849. # asm 1: mov <squarerdx=int64#3,>squarer31=int64#11
  2850. # asm 2: mov <squarerdx=%rdx,>squarer31=%r13
  2851. mov %rdx,%r13
  2852. # qhasm: squarerax = *(uint64 *)(workp + 120)
  2853. # asm 1: movq 120(<workp=int64#1),>squarerax=int64#7
  2854. # asm 2: movq 120(<workp=%rdi),>squarerax=%rax
  2855. movq 120(%rdi),%rax
  2856. # qhasm: squarerax <<= 1
  2857. # asm 1: shl $1,<squarerax=int64#7
  2858. # asm 2: shl $1,<squarerax=%rax
  2859. shl $1,%rax
  2860. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 152)
  2861. # asm 1: mulq 152(<workp=int64#1)
  2862. # asm 2: mulq 152(<workp=%rdi)
  2863. mulq 152(%rdi)
  2864. # qhasm: xq4 = squarerax
  2865. # asm 1: mov <squarerax=int64#7,>xq4=int64#12
  2866. # asm 2: mov <squarerax=%rax,>xq4=%r14
  2867. mov %rax,%r14
  2868. # qhasm: squarer41 = squarerdx
  2869. # asm 1: mov <squarerdx=int64#3,>squarer41=int64#13
  2870. # asm 2: mov <squarerdx=%rdx,>squarer41=%r15
  2871. mov %rdx,%r15
  2872. # qhasm: squarerax = *(uint64 *)(workp + 128)
  2873. # asm 1: movq 128(<workp=int64#1),>squarerax=int64#7
  2874. # asm 2: movq 128(<workp=%rdi),>squarerax=%rax
  2875. movq 128(%rdi),%rax
  2876. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 128)
  2877. # asm 1: mulq 128(<workp=int64#1)
  2878. # asm 2: mulq 128(<workp=%rdi)
  2879. mulq 128(%rdi)
  2880. # qhasm: carry? xq2 += squarerax
  2881. # asm 1: add <squarerax=int64#7,<xq2=int64#8
  2882. # asm 2: add <squarerax=%rax,<xq2=%r10
  2883. add %rax,%r10
  2884. # qhasm: squarer21 += squarerdx + carry
  2885. # asm 1: adc <squarerdx=int64#3,<squarer21=int64#9
  2886. # asm 2: adc <squarerdx=%rdx,<squarer21=%r11
  2887. adc %rdx,%r11
  2888. # qhasm: squarerax = *(uint64 *)(workp + 128)
  2889. # asm 1: movq 128(<workp=int64#1),>squarerax=int64#7
  2890. # asm 2: movq 128(<workp=%rdi),>squarerax=%rax
  2891. movq 128(%rdi),%rax
  2892. # qhasm: squarerax <<= 1
  2893. # asm 1: shl $1,<squarerax=int64#7
  2894. # asm 2: shl $1,<squarerax=%rax
  2895. shl $1,%rax
  2896. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 136)
  2897. # asm 1: mulq 136(<workp=int64#1)
  2898. # asm 2: mulq 136(<workp=%rdi)
  2899. mulq 136(%rdi)
  2900. # qhasm: carry? xq3 += squarerax
  2901. # asm 1: add <squarerax=int64#7,<xq3=int64#10
  2902. # asm 2: add <squarerax=%rax,<xq3=%r12
  2903. add %rax,%r12
  2904. # qhasm: squarer31 += squarerdx + carry
  2905. # asm 1: adc <squarerdx=int64#3,<squarer31=int64#11
  2906. # asm 2: adc <squarerdx=%rdx,<squarer31=%r13
  2907. adc %rdx,%r13
  2908. # qhasm: squarerax = *(uint64 *)(workp + 128)
  2909. # asm 1: movq 128(<workp=int64#1),>squarerax=int64#7
  2910. # asm 2: movq 128(<workp=%rdi),>squarerax=%rax
  2911. movq 128(%rdi),%rax
  2912. # qhasm: squarerax <<= 1
  2913. # asm 1: shl $1,<squarerax=int64#7
  2914. # asm 2: shl $1,<squarerax=%rax
  2915. shl $1,%rax
  2916. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 144)
  2917. # asm 1: mulq 144(<workp=int64#1)
  2918. # asm 2: mulq 144(<workp=%rdi)
  2919. mulq 144(%rdi)
  2920. # qhasm: carry? xq4 += squarerax
  2921. # asm 1: add <squarerax=int64#7,<xq4=int64#12
  2922. # asm 2: add <squarerax=%rax,<xq4=%r14
  2923. add %rax,%r14
  2924. # qhasm: squarer41 += squarerdx + carry
  2925. # asm 1: adc <squarerdx=int64#3,<squarer41=int64#13
  2926. # asm 2: adc <squarerdx=%rdx,<squarer41=%r15
  2927. adc %rdx,%r15
  2928. # qhasm: squarerax = *(uint64 *)(workp + 128)
  2929. # asm 1: movq 128(<workp=int64#1),>squarerax=int64#3
  2930. # asm 2: movq 128(<workp=%rdi),>squarerax=%rdx
  2931. movq 128(%rdi),%rdx
  2932. # qhasm: squarerax *= 38
  2933. # asm 1: imulq $38,<squarerax=int64#3,>squarerax=int64#7
  2934. # asm 2: imulq $38,<squarerax=%rdx,>squarerax=%rax
  2935. imulq $38,%rdx,%rax
  2936. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 152)
  2937. # asm 1: mulq 152(<workp=int64#1)
  2938. # asm 2: mulq 152(<workp=%rdi)
  2939. mulq 152(%rdi)
  2940. # qhasm: carry? xq0 += squarerax
  2941. # asm 1: add <squarerax=int64#7,<xq0=int64#2
  2942. # asm 2: add <squarerax=%rax,<xq0=%rsi
  2943. add %rax,%rsi
  2944. # qhasm: squarer01 += squarerdx + carry
  2945. # asm 1: adc <squarerdx=int64#3,<squarer01=int64#4
  2946. # asm 2: adc <squarerdx=%rdx,<squarer01=%rcx
  2947. adc %rdx,%rcx
  2948. # qhasm: squarerax = *(uint64 *)(workp + 136)
  2949. # asm 1: movq 136(<workp=int64#1),>squarerax=int64#7
  2950. # asm 2: movq 136(<workp=%rdi),>squarerax=%rax
  2951. movq 136(%rdi),%rax
  2952. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 136)
  2953. # asm 1: mulq 136(<workp=int64#1)
  2954. # asm 2: mulq 136(<workp=%rdi)
  2955. mulq 136(%rdi)
  2956. # qhasm: carry? xq4 += squarerax
  2957. # asm 1: add <squarerax=int64#7,<xq4=int64#12
  2958. # asm 2: add <squarerax=%rax,<xq4=%r14
  2959. add %rax,%r14
  2960. # qhasm: squarer41 += squarerdx + carry
  2961. # asm 1: adc <squarerdx=int64#3,<squarer41=int64#13
  2962. # asm 2: adc <squarerdx=%rdx,<squarer41=%r15
  2963. adc %rdx,%r15
  2964. # qhasm: squarerax = *(uint64 *)(workp + 136)
  2965. # asm 1: movq 136(<workp=int64#1),>squarerax=int64#3
  2966. # asm 2: movq 136(<workp=%rdi),>squarerax=%rdx
  2967. movq 136(%rdi),%rdx
  2968. # qhasm: squarerax *= 38
  2969. # asm 1: imulq $38,<squarerax=int64#3,>squarerax=int64#7
  2970. # asm 2: imulq $38,<squarerax=%rdx,>squarerax=%rax
  2971. imulq $38,%rdx,%rax
  2972. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 144)
  2973. # asm 1: mulq 144(<workp=int64#1)
  2974. # asm 2: mulq 144(<workp=%rdi)
  2975. mulq 144(%rdi)
  2976. # qhasm: carry? xq0 += squarerax
  2977. # asm 1: add <squarerax=int64#7,<xq0=int64#2
  2978. # asm 2: add <squarerax=%rax,<xq0=%rsi
  2979. add %rax,%rsi
  2980. # qhasm: squarer01 += squarerdx + carry
  2981. # asm 1: adc <squarerdx=int64#3,<squarer01=int64#4
  2982. # asm 2: adc <squarerdx=%rdx,<squarer01=%rcx
  2983. adc %rdx,%rcx
  2984. # qhasm: squarerax = *(uint64 *)(workp + 136)
  2985. # asm 1: movq 136(<workp=int64#1),>squarerax=int64#3
  2986. # asm 2: movq 136(<workp=%rdi),>squarerax=%rdx
  2987. movq 136(%rdi),%rdx
  2988. # qhasm: squarerax *= 38
  2989. # asm 1: imulq $38,<squarerax=int64#3,>squarerax=int64#7
  2990. # asm 2: imulq $38,<squarerax=%rdx,>squarerax=%rax
  2991. imulq $38,%rdx,%rax
  2992. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 152)
  2993. # asm 1: mulq 152(<workp=int64#1)
  2994. # asm 2: mulq 152(<workp=%rdi)
  2995. mulq 152(%rdi)
  2996. # qhasm: carry? xq1 += squarerax
  2997. # asm 1: add <squarerax=int64#7,<xq1=int64#5
  2998. # asm 2: add <squarerax=%rax,<xq1=%r8
  2999. add %rax,%r8
  3000. # qhasm: squarer11 += squarerdx + carry
  3001. # asm 1: adc <squarerdx=int64#3,<squarer11=int64#6
  3002. # asm 2: adc <squarerdx=%rdx,<squarer11=%r9
  3003. adc %rdx,%r9
  3004. # qhasm: squarerax = *(uint64 *)(workp + 144)
  3005. # asm 1: movq 144(<workp=int64#1),>squarerax=int64#3
  3006. # asm 2: movq 144(<workp=%rdi),>squarerax=%rdx
  3007. movq 144(%rdi),%rdx
  3008. # qhasm: squarerax *= 19
  3009. # asm 1: imulq $19,<squarerax=int64#3,>squarerax=int64#7
  3010. # asm 2: imulq $19,<squarerax=%rdx,>squarerax=%rax
  3011. imulq $19,%rdx,%rax
  3012. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 144)
  3013. # asm 1: mulq 144(<workp=int64#1)
  3014. # asm 2: mulq 144(<workp=%rdi)
  3015. mulq 144(%rdi)
  3016. # qhasm: carry? xq1 += squarerax
  3017. # asm 1: add <squarerax=int64#7,<xq1=int64#5
  3018. # asm 2: add <squarerax=%rax,<xq1=%r8
  3019. add %rax,%r8
  3020. # qhasm: squarer11 += squarerdx + carry
  3021. # asm 1: adc <squarerdx=int64#3,<squarer11=int64#6
  3022. # asm 2: adc <squarerdx=%rdx,<squarer11=%r9
  3023. adc %rdx,%r9
  3024. # qhasm: squarerax = *(uint64 *)(workp + 144)
  3025. # asm 1: movq 144(<workp=int64#1),>squarerax=int64#3
  3026. # asm 2: movq 144(<workp=%rdi),>squarerax=%rdx
  3027. movq 144(%rdi),%rdx
  3028. # qhasm: squarerax *= 38
  3029. # asm 1: imulq $38,<squarerax=int64#3,>squarerax=int64#7
  3030. # asm 2: imulq $38,<squarerax=%rdx,>squarerax=%rax
  3031. imulq $38,%rdx,%rax
  3032. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 152)
  3033. # asm 1: mulq 152(<workp=int64#1)
  3034. # asm 2: mulq 152(<workp=%rdi)
  3035. mulq 152(%rdi)
  3036. # qhasm: carry? xq2 += squarerax
  3037. # asm 1: add <squarerax=int64#7,<xq2=int64#8
  3038. # asm 2: add <squarerax=%rax,<xq2=%r10
  3039. add %rax,%r10
  3040. # qhasm: squarer21 += squarerdx + carry
  3041. # asm 1: adc <squarerdx=int64#3,<squarer21=int64#9
  3042. # asm 2: adc <squarerdx=%rdx,<squarer21=%r11
  3043. adc %rdx,%r11
  3044. # qhasm: squarerax = *(uint64 *)(workp + 152)
  3045. # asm 1: movq 152(<workp=int64#1),>squarerax=int64#3
  3046. # asm 2: movq 152(<workp=%rdi),>squarerax=%rdx
  3047. movq 152(%rdi),%rdx
  3048. # qhasm: squarerax *= 19
  3049. # asm 1: imulq $19,<squarerax=int64#3,>squarerax=int64#7
  3050. # asm 2: imulq $19,<squarerax=%rdx,>squarerax=%rax
  3051. imulq $19,%rdx,%rax
  3052. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 152)
  3053. # asm 1: mulq 152(<workp=int64#1)
  3054. # asm 2: mulq 152(<workp=%rdi)
  3055. mulq 152(%rdi)
  3056. # qhasm: carry? xq3 += squarerax
  3057. # asm 1: add <squarerax=int64#7,<xq3=int64#10
  3058. # asm 2: add <squarerax=%rax,<xq3=%r12
  3059. add %rax,%r12
  3060. # qhasm: squarer31 += squarerdx + carry
  3061. # asm 1: adc <squarerdx=int64#3,<squarer31=int64#11
  3062. # asm 2: adc <squarerdx=%rdx,<squarer31=%r13
  3063. adc %rdx,%r13
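# Note (added comment): the squaring's column sums are complete; the same
# REDMASK51 / shld $13 / carry-ripple reduction used after the
# multiplication above now brings xq0..xq4 back to 51-bit limbs.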
  3064. # qhasm: squareredmask = *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_REDMASK51
  3065. # asm 1: movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,>squareredmask=int64#3
  3066. # asm 2: movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,>squareredmask=%rdx
  3067. movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,%rdx
  3068. # qhasm: squarer01 = (squarer01.xq0) << 13
  3069. # asm 1: shld $13,<xq0=int64#2,<squarer01=int64#4
  3070. # asm 2: shld $13,<xq0=%rsi,<squarer01=%rcx
  3071. shld $13,%rsi,%rcx
  3072. # qhasm: xq0 &= squareredmask
  3073. # asm 1: and <squareredmask=int64#3,<xq0=int64#2
  3074. # asm 2: and <squareredmask=%rdx,<xq0=%rsi
  3075. and %rdx,%rsi
  3076. # qhasm: squarer11 = (squarer11.xq1) << 13
  3077. # asm 1: shld $13,<xq1=int64#5,<squarer11=int64#6
  3078. # asm 2: shld $13,<xq1=%r8,<squarer11=%r9
  3079. shld $13,%r8,%r9
  3080. # qhasm: xq1 &= squareredmask
  3081. # asm 1: and <squareredmask=int64#3,<xq1=int64#5
  3082. # asm 2: and <squareredmask=%rdx,<xq1=%r8
  3083. and %rdx,%r8
  3084. # qhasm: xq1 += squarer01
  3085. # asm 1: add <squarer01=int64#4,<xq1=int64#5
  3086. # asm 2: add <squarer01=%rcx,<xq1=%r8
  3087. add %rcx,%r8
  3088. # qhasm: squarer21 = (squarer21.xq2) << 13
  3089. # asm 1: shld $13,<xq2=int64#8,<squarer21=int64#9
  3090. # asm 2: shld $13,<xq2=%r10,<squarer21=%r11
  3091. shld $13,%r10,%r11
  3092. # qhasm: xq2 &= squareredmask
  3093. # asm 1: and <squareredmask=int64#3,<xq2=int64#8
  3094. # asm 2: and <squareredmask=%rdx,<xq2=%r10
  3095. and %rdx,%r10
  3096. # qhasm: xq2 += squarer11
  3097. # asm 1: add <squarer11=int64#6,<xq2=int64#8
  3098. # asm 2: add <squarer11=%r9,<xq2=%r10
  3099. add %r9,%r10
  3100. # qhasm: squarer31 = (squarer31.xq3) << 13
  3101. # asm 1: shld $13,<xq3=int64#10,<squarer31=int64#11
  3102. # asm 2: shld $13,<xq3=%r12,<squarer31=%r13
  3103. shld $13,%r12,%r13
  3104. # qhasm: xq3 &= squareredmask
  3105. # asm 1: and <squareredmask=int64#3,<xq3=int64#10
  3106. # asm 2: and <squareredmask=%rdx,<xq3=%r12
  3107. and %rdx,%r12
  3108. # qhasm: xq3 += squarer21
  3109. # asm 1: add <squarer21=int64#9,<xq3=int64#10
  3110. # asm 2: add <squarer21=%r11,<xq3=%r12
  3111. add %r11,%r12
  3112. # qhasm: squarer41 = (squarer41.xq4) << 13
  3113. # asm 1: shld $13,<xq4=int64#12,<squarer41=int64#13
  3114. # asm 2: shld $13,<xq4=%r14,<squarer41=%r15
  3115. shld $13,%r14,%r15
  3116. # qhasm: xq4 &= squareredmask
  3117. # asm 1: and <squareredmask=int64#3,<xq4=int64#12
  3118. # asm 2: and <squareredmask=%rdx,<xq4=%r14
  3119. and %rdx,%r14
  3120. # qhasm: xq4 += squarer31
  3121. # asm 1: add <squarer31=int64#11,<xq4=int64#12
  3122. # asm 2: add <squarer31=%r13,<xq4=%r14
  3123. add %r13,%r14
  3124. # qhasm: squarer41 = squarer41 * 19
  3125. # asm 1: imulq $19,<squarer41=int64#13,>squarer41=int64#4
  3126. # asm 2: imulq $19,<squarer41=%r15,>squarer41=%rcx
  3127. imulq $19,%r15,%rcx
  3128. # qhasm: xq0 += squarer41
  3129. # asm 1: add <squarer41=int64#4,<xq0=int64#2
  3130. # asm 2: add <squarer41=%rcx,<xq0=%rsi
  3131. add %rcx,%rsi
  3132. # qhasm: squaret = xq0
  3133. # asm 1: mov <xq0=int64#2,>squaret=int64#4
  3134. # asm 2: mov <xq0=%rsi,>squaret=%rcx
  3135. mov %rsi,%rcx
  3136. # qhasm: (uint64) squaret >>= 51
  3137. # asm 1: shr $51,<squaret=int64#4
  3138. # asm 2: shr $51,<squaret=%rcx
  3139. shr $51,%rcx
  3140. # qhasm: squaret += xq1
  3141. # asm 1: add <xq1=int64#5,<squaret=int64#4
  3142. # asm 2: add <xq1=%r8,<squaret=%rcx
  3143. add %r8,%rcx
  3144. # qhasm: xq0 &= squareredmask
  3145. # asm 1: and <squareredmask=int64#3,<xq0=int64#2
  3146. # asm 2: and <squareredmask=%rdx,<xq0=%rsi
  3147. and %rdx,%rsi
  3148. # qhasm: xq1 = squaret
  3149. # asm 1: mov <squaret=int64#4,>xq1=int64#5
  3150. # asm 2: mov <squaret=%rcx,>xq1=%r8
  3151. mov %rcx,%r8
  3152. # qhasm: (uint64) squaret >>= 51
  3153. # asm 1: shr $51,<squaret=int64#4
  3154. # asm 2: shr $51,<squaret=%rcx
  3155. shr $51,%rcx
  3156. # qhasm: squaret += xq2
  3157. # asm 1: add <xq2=int64#8,<squaret=int64#4
  3158. # asm 2: add <xq2=%r10,<squaret=%rcx
  3159. add %r10,%rcx
  3160. # qhasm: xq1 &= squareredmask
  3161. # asm 1: and <squareredmask=int64#3,<xq1=int64#5
  3162. # asm 2: and <squareredmask=%rdx,<xq1=%r8
  3163. and %rdx,%r8
  3164. # qhasm: xq2 = squaret
  3165. # asm 1: mov <squaret=int64#4,>xq2=int64#6
  3166. # asm 2: mov <squaret=%rcx,>xq2=%r9
  3167. mov %rcx,%r9
  3168. # qhasm: (uint64) squaret >>= 51
  3169. # asm 1: shr $51,<squaret=int64#4
  3170. # asm 2: shr $51,<squaret=%rcx
  3171. shr $51,%rcx
  3172. # qhasm: squaret += xq3
  3173. # asm 1: add <xq3=int64#10,<squaret=int64#4
  3174. # asm 2: add <xq3=%r12,<squaret=%rcx
  3175. add %r12,%rcx
  3176. # qhasm: xq2 &= squareredmask
  3177. # asm 1: and <squareredmask=int64#3,<xq2=int64#6
  3178. # asm 2: and <squareredmask=%rdx,<xq2=%r9
  3179. and %rdx,%r9
  3180. # qhasm: xq3 = squaret
  3181. # asm 1: mov <squaret=int64#4,>xq3=int64#7
  3182. # asm 2: mov <squaret=%rcx,>xq3=%rax
  3183. mov %rcx,%rax
  3184. # qhasm: (uint64) squaret >>= 51
  3185. # asm 1: shr $51,<squaret=int64#4
  3186. # asm 2: shr $51,<squaret=%rcx
  3187. shr $51,%rcx
  3188. # qhasm: squaret += xq4
  3189. # asm 1: add <xq4=int64#12,<squaret=int64#4
  3190. # asm 2: add <xq4=%r14,<squaret=%rcx
  3191. add %r14,%rcx
  3192. # qhasm: xq3 &= squareredmask
  3193. # asm 1: and <squareredmask=int64#3,<xq3=int64#7
  3194. # asm 2: and <squareredmask=%rdx,<xq3=%rax
  3195. and %rdx,%rax
  3196. # qhasm: xq4 = squaret
  3197. # asm 1: mov <squaret=int64#4,>xq4=int64#8
  3198. # asm 2: mov <squaret=%rcx,>xq4=%r10
  3199. mov %rcx,%r10
  3200. # qhasm: (uint64) squaret >>= 51
  3201. # asm 1: shr $51,<squaret=int64#4
  3202. # asm 2: shr $51,<squaret=%rcx
  3203. shr $51,%rcx
  3204. # qhasm: squaret *= 19
  3205. # asm 1: imulq $19,<squaret=int64#4,>squaret=int64#4
  3206. # asm 2: imulq $19,<squaret=%rcx,>squaret=%rcx
  3207. imulq $19,%rcx,%rcx
  3208. # qhasm: xq0 += squaret
  3209. # asm 1: add <squaret=int64#4,<xq0=int64#2
  3210. # asm 2: add <squaret=%rcx,<xq0=%rsi
  3211. add %rcx,%rsi
  3212. # qhasm: xq4 &= squareredmask
  3213. # asm 1: and <squareredmask=int64#3,<xq4=int64#8
  3214. # asm 2: and <squareredmask=%rdx,<xq4=%r10
  3215. and %rdx,%r10
  3216. # qhasm: *(uint64 *)(workp + 120) = xq0
  3217. # asm 1: movq <xq0=int64#2,120(<workp=int64#1)
  3218. # asm 2: movq <xq0=%rsi,120(<workp=%rdi)
  3219. movq %rsi,120(%rdi)
  3220. # qhasm: *(uint64 *)(workp + 128) = xq1
  3221. # asm 1: movq <xq1=int64#5,128(<workp=int64#1)
  3222. # asm 2: movq <xq1=%r8,128(<workp=%rdi)
  3223. movq %r8,128(%rdi)
  3224. # qhasm: *(uint64 *)(workp + 136) = xq2
  3225. # asm 1: movq <xq2=int64#6,136(<workp=int64#1)
  3226. # asm 2: movq <xq2=%r9,136(<workp=%rdi)
  3227. movq %r9,136(%rdi)
  3228. # qhasm: *(uint64 *)(workp + 144) = xq3
  3229. # asm 1: movq <xq3=int64#7,144(<workp=int64#1)
  3230. # asm 2: movq <xq3=%rax,144(<workp=%rdi)
  3231. movq %rax,144(%rdi)
  3232. # qhasm: *(uint64 *)(workp + 152) = xq4
  3233. # asm 1: movq <xq4=int64#8,152(<workp=int64#1)
  3234. # asm 2: movq <xq4=%r10,152(<workp=%rdi)
  3235. movq %r10,152(%rdi)
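# Note (added comment): xq0..xq4 overwrite workp+120..152. The element at
# workp+160..192 is squared next, following the same doubled-cross-term and
# *19 wrap-around pattern.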
  3236. # qhasm: squarerax = *(uint64 *)(workp + 160)
  3237. # asm 1: movq 160(<workp=int64#1),>squarerax=int64#7
  3238. # asm 2: movq 160(<workp=%rdi),>squarerax=%rax
  3239. movq 160(%rdi),%rax
  3240. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 160)
  3241. # asm 1: mulq 160(<workp=int64#1)
  3242. # asm 2: mulq 160(<workp=%rdi)
  3243. mulq 160(%rdi)
  3244. # qhasm: zq0 = squarerax
  3245. # asm 1: mov <squarerax=int64#7,>zq0=int64#2
  3246. # asm 2: mov <squarerax=%rax,>zq0=%rsi
  3247. mov %rax,%rsi
  3248. # qhasm: squarer01 = squarerdx
  3249. # asm 1: mov <squarerdx=int64#3,>squarer01=int64#4
  3250. # asm 2: mov <squarerdx=%rdx,>squarer01=%rcx
  3251. mov %rdx,%rcx
  3252. # qhasm: squarerax = *(uint64 *)(workp + 160)
  3253. # asm 1: movq 160(<workp=int64#1),>squarerax=int64#7
  3254. # asm 2: movq 160(<workp=%rdi),>squarerax=%rax
  3255. movq 160(%rdi),%rax
  3256. # qhasm: squarerax <<= 1
  3257. # asm 1: shl $1,<squarerax=int64#7
  3258. # asm 2: shl $1,<squarerax=%rax
  3259. shl $1,%rax
  3260. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 168)
  3261. # asm 1: mulq 168(<workp=int64#1)
  3262. # asm 2: mulq 168(<workp=%rdi)
  3263. mulq 168(%rdi)
  3264. # qhasm: zq1 = squarerax
  3265. # asm 1: mov <squarerax=int64#7,>zq1=int64#5
  3266. # asm 2: mov <squarerax=%rax,>zq1=%r8
  3267. mov %rax,%r8
  3268. # qhasm: squarer11 = squarerdx
  3269. # asm 1: mov <squarerdx=int64#3,>squarer11=int64#6
  3270. # asm 2: mov <squarerdx=%rdx,>squarer11=%r9
  3271. mov %rdx,%r9
  3272. # qhasm: squarerax = *(uint64 *)(workp + 160)
  3273. # asm 1: movq 160(<workp=int64#1),>squarerax=int64#7
  3274. # asm 2: movq 160(<workp=%rdi),>squarerax=%rax
  3275. movq 160(%rdi),%rax
  3276. # qhasm: squarerax <<= 1
  3277. # asm 1: shl $1,<squarerax=int64#7
  3278. # asm 2: shl $1,<squarerax=%rax
  3279. shl $1,%rax
  3280. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 176)
  3281. # asm 1: mulq 176(<workp=int64#1)
  3282. # asm 2: mulq 176(<workp=%rdi)
  3283. mulq 176(%rdi)
  3284. # qhasm: zq2 = squarerax
  3285. # asm 1: mov <squarerax=int64#7,>zq2=int64#8
  3286. # asm 2: mov <squarerax=%rax,>zq2=%r10
  3287. mov %rax,%r10
  3288. # qhasm: squarer21 = squarerdx
  3289. # asm 1: mov <squarerdx=int64#3,>squarer21=int64#9
  3290. # asm 2: mov <squarerdx=%rdx,>squarer21=%r11
  3291. mov %rdx,%r11
  3292. # qhasm: squarerax = *(uint64 *)(workp + 160)
  3293. # asm 1: movq 160(<workp=int64#1),>squarerax=int64#7
  3294. # asm 2: movq 160(<workp=%rdi),>squarerax=%rax
  3295. movq 160(%rdi),%rax
  3296. # qhasm: squarerax <<= 1
  3297. # asm 1: shl $1,<squarerax=int64#7
  3298. # asm 2: shl $1,<squarerax=%rax
  3299. shl $1,%rax
  3300. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 184)
  3301. # asm 1: mulq 184(<workp=int64#1)
  3302. # asm 2: mulq 184(<workp=%rdi)
  3303. mulq 184(%rdi)
  3304. # qhasm: zq3 = squarerax
  3305. # asm 1: mov <squarerax=int64#7,>zq3=int64#10
  3306. # asm 2: mov <squarerax=%rax,>zq3=%r12
  3307. mov %rax,%r12
  3308. # qhasm: squarer31 = squarerdx
  3309. # asm 1: mov <squarerdx=int64#3,>squarer31=int64#11
  3310. # asm 2: mov <squarerdx=%rdx,>squarer31=%r13
  3311. mov %rdx,%r13
  3312. # qhasm: squarerax = *(uint64 *)(workp + 160)
  3313. # asm 1: movq 160(<workp=int64#1),>squarerax=int64#7
  3314. # asm 2: movq 160(<workp=%rdi),>squarerax=%rax
  3315. movq 160(%rdi),%rax
  3316. # qhasm: squarerax <<= 1
  3317. # asm 1: shl $1,<squarerax=int64#7
  3318. # asm 2: shl $1,<squarerax=%rax
  3319. shl $1,%rax
  3320. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 192)
  3321. # asm 1: mulq 192(<workp=int64#1)
  3322. # asm 2: mulq 192(<workp=%rdi)
  3323. mulq 192(%rdi)
  3324. # qhasm: zq4 = squarerax
  3325. # asm 1: mov <squarerax=int64#7,>zq4=int64#12
  3326. # asm 2: mov <squarerax=%rax,>zq4=%r14
  3327. mov %rax,%r14
  3328. # qhasm: squarer41 = squarerdx
  3329. # asm 1: mov <squarerdx=int64#3,>squarer41=int64#13
  3330. # asm 2: mov <squarerdx=%rdx,>squarer41=%r15
  3331. mov %rdx,%r15
  3332. # qhasm: squarerax = *(uint64 *)(workp + 168)
  3333. # asm 1: movq 168(<workp=int64#1),>squarerax=int64#7
  3334. # asm 2: movq 168(<workp=%rdi),>squarerax=%rax
  3335. movq 168(%rdi),%rax
  3336. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 168)
  3337. # asm 1: mulq 168(<workp=int64#1)
  3338. # asm 2: mulq 168(<workp=%rdi)
  3339. mulq 168(%rdi)
  3340. # qhasm: carry? zq2 += squarerax
  3341. # asm 1: add <squarerax=int64#7,<zq2=int64#8
  3342. # asm 2: add <squarerax=%rax,<zq2=%r10
  3343. add %rax,%r10
  3344. # qhasm: squarer21 += squarerdx + carry
  3345. # asm 1: adc <squarerdx=int64#3,<squarer21=int64#9
  3346. # asm 2: adc <squarerdx=%rdx,<squarer21=%r11
  3347. adc %rdx,%r11
  3348. # qhasm: squarerax = *(uint64 *)(workp + 168)
  3349. # asm 1: movq 168(<workp=int64#1),>squarerax=int64#7
  3350. # asm 2: movq 168(<workp=%rdi),>squarerax=%rax
  3351. movq 168(%rdi),%rax
  3352. # qhasm: squarerax <<= 1
  3353. # asm 1: shl $1,<squarerax=int64#7
  3354. # asm 2: shl $1,<squarerax=%rax
  3355. shl $1,%rax
  3356. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 176)
  3357. # asm 1: mulq 176(<workp=int64#1)
  3358. # asm 2: mulq 176(<workp=%rdi)
  3359. mulq 176(%rdi)
  3360. # qhasm: carry? zq3 += squarerax
  3361. # asm 1: add <squarerax=int64#7,<zq3=int64#10
  3362. # asm 2: add <squarerax=%rax,<zq3=%r12
  3363. add %rax,%r12
  3364. # qhasm: squarer31 += squarerdx + carry
  3365. # asm 1: adc <squarerdx=int64#3,<squarer31=int64#11
  3366. # asm 2: adc <squarerdx=%rdx,<squarer31=%r13
  3367. adc %rdx,%r13
  3368. # qhasm: squarerax = *(uint64 *)(workp + 168)
  3369. # asm 1: movq 168(<workp=int64#1),>squarerax=int64#7
  3370. # asm 2: movq 168(<workp=%rdi),>squarerax=%rax
  3371. movq 168(%rdi),%rax
  3372. # qhasm: squarerax <<= 1
  3373. # asm 1: shl $1,<squarerax=int64#7
  3374. # asm 2: shl $1,<squarerax=%rax
  3375. shl $1,%rax
  3376. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 184)
  3377. # asm 1: mulq 184(<workp=int64#1)
  3378. # asm 2: mulq 184(<workp=%rdi)
  3379. mulq 184(%rdi)
  3380. # qhasm: carry? zq4 += squarerax
  3381. # asm 1: add <squarerax=int64#7,<zq4=int64#12
  3382. # asm 2: add <squarerax=%rax,<zq4=%r14
  3383. add %rax,%r14
  3384. # qhasm: squarer41 += squarerdx + carry
  3385. # asm 1: adc <squarerdx=int64#3,<squarer41=int64#13
  3386. # asm 2: adc <squarerdx=%rdx,<squarer41=%r15
  3387. adc %rdx,%r15
  3388. # qhasm: squarerax = *(uint64 *)(workp + 168)
  3389. # asm 1: movq 168(<workp=int64#1),>squarerax=int64#3
  3390. # asm 2: movq 168(<workp=%rdi),>squarerax=%rdx
  3391. movq 168(%rdi),%rdx
  3392. # qhasm: squarerax *= 38
  3393. # asm 1: imulq $38,<squarerax=int64#3,>squarerax=int64#7
  3394. # asm 2: imulq $38,<squarerax=%rdx,>squarerax=%rax
  3395. imulq $38,%rdx,%rax
  3396. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 192)
  3397. # asm 1: mulq 192(<workp=int64#1)
  3398. # asm 2: mulq 192(<workp=%rdi)
  3399. mulq 192(%rdi)
  3400. # qhasm: carry? zq0 += squarerax
  3401. # asm 1: add <squarerax=int64#7,<zq0=int64#2
  3402. # asm 2: add <squarerax=%rax,<zq0=%rsi
  3403. add %rax,%rsi
  3404. # qhasm: squarer01 += squarerdx + carry
  3405. # asm 1: adc <squarerdx=int64#3,<squarer01=int64#4
  3406. # asm 2: adc <squarerdx=%rdx,<squarer01=%rcx
  3407. adc %rdx,%rcx
  3408. # qhasm: squarerax = *(uint64 *)(workp + 176)
  3409. # asm 1: movq 176(<workp=int64#1),>squarerax=int64#7
  3410. # asm 2: movq 176(<workp=%rdi),>squarerax=%rax
  3411. movq 176(%rdi),%rax
  3412. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 176)
  3413. # asm 1: mulq 176(<workp=int64#1)
  3414. # asm 2: mulq 176(<workp=%rdi)
  3415. mulq 176(%rdi)
  3416. # qhasm: carry? zq4 += squarerax
  3417. # asm 1: add <squarerax=int64#7,<zq4=int64#12
  3418. # asm 2: add <squarerax=%rax,<zq4=%r14
  3419. add %rax,%r14
  3420. # qhasm: squarer41 += squarerdx + carry
  3421. # asm 1: adc <squarerdx=int64#3,<squarer41=int64#13
  3422. # asm 2: adc <squarerdx=%rdx,<squarer41=%r15
  3423. adc %rdx,%r15
  3424. # qhasm: squarerax = *(uint64 *)(workp + 176)
  3425. # asm 1: movq 176(<workp=int64#1),>squarerax=int64#3
  3426. # asm 2: movq 176(<workp=%rdi),>squarerax=%rdx
  3427. movq 176(%rdi),%rdx
  3428. # qhasm: squarerax *= 38
  3429. # asm 1: imulq $38,<squarerax=int64#3,>squarerax=int64#7
  3430. # asm 2: imulq $38,<squarerax=%rdx,>squarerax=%rax
  3431. imulq $38,%rdx,%rax
  3432. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 184)
  3433. # asm 1: mulq 184(<workp=int64#1)
  3434. # asm 2: mulq 184(<workp=%rdi)
  3435. mulq 184(%rdi)
  3436. # qhasm: carry? zq0 += squarerax
  3437. # asm 1: add <squarerax=int64#7,<zq0=int64#2
  3438. # asm 2: add <squarerax=%rax,<zq0=%rsi
  3439. add %rax,%rsi
  3440. # qhasm: squarer01 += squarerdx + carry
  3441. # asm 1: adc <squarerdx=int64#3,<squarer01=int64#4
  3442. # asm 2: adc <squarerdx=%rdx,<squarer01=%rcx
  3443. adc %rdx,%rcx
  3444. # qhasm: squarerax = *(uint64 *)(workp + 176)
  3445. # asm 1: movq 176(<workp=int64#1),>squarerax=int64#3
  3446. # asm 2: movq 176(<workp=%rdi),>squarerax=%rdx
  3447. movq 176(%rdi),%rdx
  3448. # qhasm: squarerax *= 38
  3449. # asm 1: imulq $38,<squarerax=int64#3,>squarerax=int64#7
  3450. # asm 2: imulq $38,<squarerax=%rdx,>squarerax=%rax
  3451. imulq $38,%rdx,%rax
  3452. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 192)
  3453. # asm 1: mulq 192(<workp=int64#1)
  3454. # asm 2: mulq 192(<workp=%rdi)
  3455. mulq 192(%rdi)
  3456. # qhasm: carry? zq1 += squarerax
  3457. # asm 1: add <squarerax=int64#7,<zq1=int64#5
  3458. # asm 2: add <squarerax=%rax,<zq1=%r8
  3459. add %rax,%r8
  3460. # qhasm: squarer11 += squarerdx + carry
  3461. # asm 1: adc <squarerdx=int64#3,<squarer11=int64#6
  3462. # asm 2: adc <squarerdx=%rdx,<squarer11=%r9
  3463. adc %rdx,%r9
  3464. # qhasm: squarerax = *(uint64 *)(workp + 184)
  3465. # asm 1: movq 184(<workp=int64#1),>squarerax=int64#3
  3466. # asm 2: movq 184(<workp=%rdi),>squarerax=%rdx
  3467. movq 184(%rdi),%rdx
  3468. # qhasm: squarerax *= 19
  3469. # asm 1: imulq $19,<squarerax=int64#3,>squarerax=int64#7
  3470. # asm 2: imulq $19,<squarerax=%rdx,>squarerax=%rax
  3471. imulq $19,%rdx,%rax
  3472. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 184)
  3473. # asm 1: mulq 184(<workp=int64#1)
  3474. # asm 2: mulq 184(<workp=%rdi)
  3475. mulq 184(%rdi)
  3476. # qhasm: carry? zq1 += squarerax
  3477. # asm 1: add <squarerax=int64#7,<zq1=int64#5
  3478. # asm 2: add <squarerax=%rax,<zq1=%r8
  3479. add %rax,%r8
  3480. # qhasm: squarer11 += squarerdx + carry
  3481. # asm 1: adc <squarerdx=int64#3,<squarer11=int64#6
  3482. # asm 2: adc <squarerdx=%rdx,<squarer11=%r9
  3483. adc %rdx,%r9
  3484. # qhasm: squarerax = *(uint64 *)(workp + 184)
  3485. # asm 1: movq 184(<workp=int64#1),>squarerax=int64#3
  3486. # asm 2: movq 184(<workp=%rdi),>squarerax=%rdx
  3487. movq 184(%rdi),%rdx
  3488. # qhasm: squarerax *= 38
  3489. # asm 1: imulq $38,<squarerax=int64#3,>squarerax=int64#7
  3490. # asm 2: imulq $38,<squarerax=%rdx,>squarerax=%rax
  3491. imulq $38,%rdx,%rax
  3492. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 192)
  3493. # asm 1: mulq 192(<workp=int64#1)
  3494. # asm 2: mulq 192(<workp=%rdi)
  3495. mulq 192(%rdi)
  3496. # qhasm: carry? zq2 += squarerax
  3497. # asm 1: add <squarerax=int64#7,<zq2=int64#8
  3498. # asm 2: add <squarerax=%rax,<zq2=%r10
  3499. add %rax,%r10
  3500. # qhasm: squarer21 += squarerdx + carry
  3501. # asm 1: adc <squarerdx=int64#3,<squarer21=int64#9
  3502. # asm 2: adc <squarerdx=%rdx,<squarer21=%r11
  3503. adc %rdx,%r11
  3504. # qhasm: squarerax = *(uint64 *)(workp + 192)
  3505. # asm 1: movq 192(<workp=int64#1),>squarerax=int64#3
  3506. # asm 2: movq 192(<workp=%rdi),>squarerax=%rdx
  3507. movq 192(%rdi),%rdx
  3508. # qhasm: squarerax *= 19
  3509. # asm 1: imulq $19,<squarerax=int64#3,>squarerax=int64#7
  3510. # asm 2: imulq $19,<squarerax=%rdx,>squarerax=%rax
  3511. imulq $19,%rdx,%rax
  3512. # qhasm: (uint128) squarerdx squarerax = squarerax * *(uint64 *)(workp + 192)
  3513. # asm 1: mulq 192(<workp=int64#1)
  3514. # asm 2: mulq 192(<workp=%rdi)
  3515. mulq 192(%rdi)
  3516. # qhasm: carry? zq3 += squarerax
  3517. # asm 1: add <squarerax=int64#7,<zq3=int64#10
  3518. # asm 2: add <squarerax=%rax,<zq3=%r12
  3519. add %rax,%r12
  3520. # qhasm: squarer31 += squarerdx + carry
  3521. # asm 1: adc <squarerdx=int64#3,<squarer31=int64#11
  3522. # asm 2: adc <squarerdx=%rdx,<squarer31=%r13
  3523. adc %rdx,%r13
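# ---------------------------------------------------------------------------
# Editorial note (not generated output): the products accumulated above are
# the column sums of a 5-limb radix-2^51 squaring, reduced modulo
# p = 2^255 - 19.  Since 2^255 = 2^(5*51) is congruent to 19 mod p, a product
# of limbs i and j with i + j >= 5 wraps into column (i + j - 5) with an extra
# factor of 19, and off-diagonal products additionally occur twice.  That is
# why the operands above are variously left as-is, shifted left by 1
# (factor 2), or pre-multiplied by 19 or 38 = 2*19 with imulq.  For example,
# with z = z0 + 2^51*z1 + ... + 2^204*z4 held at workp+160..192, column 0 of
# the square (accumulated in the register pair squarer01:zq0) is
#     z0*z0 + 38*(z1*z4 + z2*z3).
# ---------------------------------------------------------------------------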
  3524. # qhasm: squareredmask = *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_REDMASK51
  3525. # asm 1: movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,>squareredmask=int64#3
  3526. # asm 2: movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,>squareredmask=%rdx
  3527. movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,%rdx
  3528. # qhasm: squarer01 = (squarer01.zq0) << 13
  3529. # asm 1: shld $13,<zq0=int64#2,<squarer01=int64#4
  3530. # asm 2: shld $13,<zq0=%rsi,<squarer01=%rcx
  3531. shld $13,%rsi,%rcx
  3532. # qhasm: zq0 &= squareredmask
  3533. # asm 1: and <squareredmask=int64#3,<zq0=int64#2
  3534. # asm 2: and <squareredmask=%rdx,<zq0=%rsi
  3535. and %rdx,%rsi
  3536. # qhasm: squarer11 = (squarer11.zq1) << 13
  3537. # asm 1: shld $13,<zq1=int64#5,<squarer11=int64#6
  3538. # asm 2: shld $13,<zq1=%r8,<squarer11=%r9
  3539. shld $13,%r8,%r9
  3540. # qhasm: zq1 &= squareredmask
  3541. # asm 1: and <squareredmask=int64#3,<zq1=int64#5
  3542. # asm 2: and <squareredmask=%rdx,<zq1=%r8
  3543. and %rdx,%r8
  3544. # qhasm: zq1 += squarer01
  3545. # asm 1: add <squarer01=int64#4,<zq1=int64#5
  3546. # asm 2: add <squarer01=%rcx,<zq1=%r8
  3547. add %rcx,%r8
  3548. # qhasm: squarer21 = (squarer21.zq2) << 13
  3549. # asm 1: shld $13,<zq2=int64#8,<squarer21=int64#9
  3550. # asm 2: shld $13,<zq2=%r10,<squarer21=%r11
  3551. shld $13,%r10,%r11
  3552. # qhasm: zq2 &= squareredmask
  3553. # asm 1: and <squareredmask=int64#3,<zq2=int64#8
  3554. # asm 2: and <squareredmask=%rdx,<zq2=%r10
  3555. and %rdx,%r10
  3556. # qhasm: zq2 += squarer11
  3557. # asm 1: add <squarer11=int64#6,<zq2=int64#8
  3558. # asm 2: add <squarer11=%r9,<zq2=%r10
  3559. add %r9,%r10
  3560. # qhasm: squarer31 = (squarer31.zq3) << 13
  3561. # asm 1: shld $13,<zq3=int64#10,<squarer31=int64#11
  3562. # asm 2: shld $13,<zq3=%r12,<squarer31=%r13
  3563. shld $13,%r12,%r13
  3564. # qhasm: zq3 &= squareredmask
  3565. # asm 1: and <squareredmask=int64#3,<zq3=int64#10
  3566. # asm 2: and <squareredmask=%rdx,<zq3=%r12
  3567. and %rdx,%r12
  3568. # qhasm: zq3 += squarer21
  3569. # asm 1: add <squarer21=int64#9,<zq3=int64#10
  3570. # asm 2: add <squarer21=%r11,<zq3=%r12
  3571. add %r11,%r12
  3572. # qhasm: squarer41 = (squarer41.zq4) << 13
  3573. # asm 1: shld $13,<zq4=int64#12,<squarer41=int64#13
  3574. # asm 2: shld $13,<zq4=%r14,<squarer41=%r15
  3575. shld $13,%r14,%r15
  3576. # qhasm: zq4 &= squareredmask
  3577. # asm 1: and <squareredmask=int64#3,<zq4=int64#12
  3578. # asm 2: and <squareredmask=%rdx,<zq4=%r14
  3579. and %rdx,%r14
  3580. # qhasm: zq4 += squarer31
  3581. # asm 1: add <squarer31=int64#11,<zq4=int64#12
  3582. # asm 2: add <squarer31=%r13,<zq4=%r14
  3583. add %r13,%r14
  3584. # qhasm: squarer41 = squarer41 * 19
  3585. # asm 1: imulq $19,<squarer41=int64#13,>squarer41=int64#4
  3586. # asm 2: imulq $19,<squarer41=%r15,>squarer41=%rcx
  3587. imulq $19,%r15,%rcx
  3588. # qhasm: zq0 += squarer41
  3589. # asm 1: add <squarer41=int64#4,<zq0=int64#2
  3590. # asm 2: add <squarer41=%rcx,<zq0=%rsi
  3591. add %rcx,%rsi
  3592. # qhasm: squaret = zq0
  3593. # asm 1: mov <zq0=int64#2,>squaret=int64#4
  3594. # asm 2: mov <zq0=%rsi,>squaret=%rcx
  3595. mov %rsi,%rcx
  3596. # qhasm: (uint64) squaret >>= 51
  3597. # asm 1: shr $51,<squaret=int64#4
  3598. # asm 2: shr $51,<squaret=%rcx
  3599. shr $51,%rcx
  3600. # qhasm: squaret += zq1
  3601. # asm 1: add <zq1=int64#5,<squaret=int64#4
  3602. # asm 2: add <zq1=%r8,<squaret=%rcx
  3603. add %r8,%rcx
  3604. # qhasm: zq0 &= squareredmask
  3605. # asm 1: and <squareredmask=int64#3,<zq0=int64#2
  3606. # asm 2: and <squareredmask=%rdx,<zq0=%rsi
  3607. and %rdx,%rsi
  3608. # qhasm: zq1 = squaret
  3609. # asm 1: mov <squaret=int64#4,>zq1=int64#5
  3610. # asm 2: mov <squaret=%rcx,>zq1=%r8
  3611. mov %rcx,%r8
  3612. # qhasm: (uint64) squaret >>= 51
  3613. # asm 1: shr $51,<squaret=int64#4
  3614. # asm 2: shr $51,<squaret=%rcx
  3615. shr $51,%rcx
  3616. # qhasm: squaret += zq2
  3617. # asm 1: add <zq2=int64#8,<squaret=int64#4
  3618. # asm 2: add <zq2=%r10,<squaret=%rcx
  3619. add %r10,%rcx
  3620. # qhasm: zq1 &= squareredmask
  3621. # asm 1: and <squareredmask=int64#3,<zq1=int64#5
  3622. # asm 2: and <squareredmask=%rdx,<zq1=%r8
  3623. and %rdx,%r8
  3624. # qhasm: zq2 = squaret
  3625. # asm 1: mov <squaret=int64#4,>zq2=int64#6
  3626. # asm 2: mov <squaret=%rcx,>zq2=%r9
  3627. mov %rcx,%r9
  3628. # qhasm: (uint64) squaret >>= 51
  3629. # asm 1: shr $51,<squaret=int64#4
  3630. # asm 2: shr $51,<squaret=%rcx
  3631. shr $51,%rcx
  3632. # qhasm: squaret += zq3
  3633. # asm 1: add <zq3=int64#10,<squaret=int64#4
  3634. # asm 2: add <zq3=%r12,<squaret=%rcx
  3635. add %r12,%rcx
  3636. # qhasm: zq2 &= squareredmask
  3637. # asm 1: and <squareredmask=int64#3,<zq2=int64#6
  3638. # asm 2: and <squareredmask=%rdx,<zq2=%r9
  3639. and %rdx,%r9
  3640. # qhasm: zq3 = squaret
  3641. # asm 1: mov <squaret=int64#4,>zq3=int64#7
  3642. # asm 2: mov <squaret=%rcx,>zq3=%rax
  3643. mov %rcx,%rax
  3644. # qhasm: (uint64) squaret >>= 51
  3645. # asm 1: shr $51,<squaret=int64#4
  3646. # asm 2: shr $51,<squaret=%rcx
  3647. shr $51,%rcx
  3648. # qhasm: squaret += zq4
  3649. # asm 1: add <zq4=int64#12,<squaret=int64#4
  3650. # asm 2: add <zq4=%r14,<squaret=%rcx
  3651. add %r14,%rcx
  3652. # qhasm: zq3 &= squareredmask
  3653. # asm 1: and <squareredmask=int64#3,<zq3=int64#7
  3654. # asm 2: and <squareredmask=%rdx,<zq3=%rax
  3655. and %rdx,%rax
  3656. # qhasm: zq4 = squaret
  3657. # asm 1: mov <squaret=int64#4,>zq4=int64#8
  3658. # asm 2: mov <squaret=%rcx,>zq4=%r10
  3659. mov %rcx,%r10
  3660. # qhasm: (uint64) squaret >>= 51
  3661. # asm 1: shr $51,<squaret=int64#4
  3662. # asm 2: shr $51,<squaret=%rcx
  3663. shr $51,%rcx
  3664. # qhasm: squaret *= 19
  3665. # asm 1: imulq $19,<squaret=int64#4,>squaret=int64#4
  3666. # asm 2: imulq $19,<squaret=%rcx,>squaret=%rcx
  3667. imulq $19,%rcx,%rcx
  3668. # qhasm: zq0 += squaret
  3669. # asm 1: add <squaret=int64#4,<zq0=int64#2
  3670. # asm 2: add <squaret=%rcx,<zq0=%rsi
  3671. add %rcx,%rsi
  3672. # qhasm: zq4 &= squareredmask
  3673. # asm 1: and <squareredmask=int64#3,<zq4=int64#8
  3674. # asm 2: and <squareredmask=%rdx,<zq4=%r10
  3675. and %rdx,%r10
  3676. # qhasm: *(uint64 *)(workp + 160) = zq0
  3677. # asm 1: movq <zq0=int64#2,160(<workp=int64#1)
  3678. # asm 2: movq <zq0=%rsi,160(<workp=%rdi)
  3679. movq %rsi,160(%rdi)
  3680. # qhasm: *(uint64 *)(workp + 168) = zq1
  3681. # asm 1: movq <zq1=int64#5,168(<workp=int64#1)
  3682. # asm 2: movq <zq1=%r8,168(<workp=%rdi)
  3683. movq %r8,168(%rdi)
  3684. # qhasm: *(uint64 *)(workp + 176) = zq2
  3685. # asm 1: movq <zq2=int64#6,176(<workp=int64#1)
  3686. # asm 2: movq <zq2=%r9,176(<workp=%rdi)
  3687. movq %r9,176(%rdi)
  3688. # qhasm: *(uint64 *)(workp + 184) = zq3
  3689. # asm 1: movq <zq3=int64#7,184(<workp=int64#1)
  3690. # asm 2: movq <zq3=%rax,184(<workp=%rdi)
  3691. movq %rax,184(%rdi)
  3692. # qhasm: *(uint64 *)(workp + 192) = zq4
  3693. # asm 1: movq <zq4=int64#8,192(<workp=int64#1)
  3694. # asm 2: movq <zq4=%r10,192(<workp=%rdi)
  3695. movq %r10,192(%rdi)
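# ---------------------------------------------------------------------------
# Editorial note (not generated output): the stanza ending here reduces the
# five 128-bit column sums of the squaring back to five limbs of roughly 51
# bits each and stores them at workp+160..192.  A hedged sketch of the
# scheme, written as pseudocode in comments only:
#
#     # each column sits in a register pair hi:lo (e.g. squarer01:zq0)
#     hi = (hi << 13) | (lo >> 51)         # the shld $13 instructions
#     lo = lo & REDMASK51                  # REDMASK51 is the 51-bit mask 2^51 - 1
#     limb[i+1] += hi                      # carry each column into the next
#     limb[0]  += 19 * carry_out_of_limb4  # since 2^255 == 19 (mod 2^255 - 19)
#
# followed by one sequential carry pass (shr $51 / add / and) whose final
# carry is again multiplied by 19 and folded back into limb 0.
# ---------------------------------------------------------------------------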
  3696. # qhasm: mulrax = *(uint64 *)(workp + 184)
  3697. # asm 1: movq 184(<workp=int64#1),>mulrax=int64#2
  3698. # asm 2: movq 184(<workp=%rdi),>mulrax=%rsi
  3699. movq 184(%rdi),%rsi
  3700. # qhasm: mulrax *= 19
  3701. # asm 1: imulq $19,<mulrax=int64#2,>mulrax=int64#7
  3702. # asm 2: imulq $19,<mulrax=%rsi,>mulrax=%rax
  3703. imulq $19,%rsi,%rax
  3704. # qhasm: mulx319_stack = mulrax
  3705. # asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#8
  3706. # asm 2: movq <mulrax=%rax,>mulx319_stack=56(%rsp)
  3707. movq %rax,56(%rsp)
  3708. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 16)
  3709. # asm 1: mulq 16(<workp=int64#1)
  3710. # asm 2: mulq 16(<workp=%rdi)
  3711. mulq 16(%rdi)
  3712. # qhasm: zq0 = mulrax
  3713. # asm 1: mov <mulrax=int64#7,>zq0=int64#2
  3714. # asm 2: mov <mulrax=%rax,>zq0=%rsi
  3715. mov %rax,%rsi
  3716. # qhasm: mulr01 = mulrdx
  3717. # asm 1: mov <mulrdx=int64#3,>mulr01=int64#4
  3718. # asm 2: mov <mulrdx=%rdx,>mulr01=%rcx
  3719. mov %rdx,%rcx
  3720. # qhasm: mulrax = *(uint64 *)(workp + 192)
  3721. # asm 1: movq 192(<workp=int64#1),>mulrax=int64#3
  3722. # asm 2: movq 192(<workp=%rdi),>mulrax=%rdx
  3723. movq 192(%rdi),%rdx
  3724. # qhasm: mulrax *= 19
  3725. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  3726. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  3727. imulq $19,%rdx,%rax
  3728. # qhasm: mulx419_stack = mulrax
  3729. # asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#9
  3730. # asm 2: movq <mulrax=%rax,>mulx419_stack=64(%rsp)
  3731. movq %rax,64(%rsp)
  3732. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 8)
  3733. # asm 1: mulq 8(<workp=int64#1)
  3734. # asm 2: mulq 8(<workp=%rdi)
  3735. mulq 8(%rdi)
  3736. # qhasm: carry? zq0 += mulrax
  3737. # asm 1: add <mulrax=int64#7,<zq0=int64#2
  3738. # asm 2: add <mulrax=%rax,<zq0=%rsi
  3739. add %rax,%rsi
  3740. # qhasm: mulr01 += mulrdx + carry
  3741. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  3742. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  3743. adc %rdx,%rcx
  3744. # qhasm: mulrax = *(uint64 *)(workp + 160)
  3745. # asm 1: movq 160(<workp=int64#1),>mulrax=int64#7
  3746. # asm 2: movq 160(<workp=%rdi),>mulrax=%rax
  3747. movq 160(%rdi),%rax
  3748. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 0)
  3749. # asm 1: mulq 0(<workp=int64#1)
  3750. # asm 2: mulq 0(<workp=%rdi)
  3751. mulq 0(%rdi)
  3752. # qhasm: carry? zq0 += mulrax
  3753. # asm 1: add <mulrax=int64#7,<zq0=int64#2
  3754. # asm 2: add <mulrax=%rax,<zq0=%rsi
  3755. add %rax,%rsi
  3756. # qhasm: mulr01 += mulrdx + carry
  3757. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  3758. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  3759. adc %rdx,%rcx
  3760. # qhasm: mulrax = *(uint64 *)(workp + 160)
  3761. # asm 1: movq 160(<workp=int64#1),>mulrax=int64#7
  3762. # asm 2: movq 160(<workp=%rdi),>mulrax=%rax
  3763. movq 160(%rdi),%rax
  3764. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 8)
  3765. # asm 1: mulq 8(<workp=int64#1)
  3766. # asm 2: mulq 8(<workp=%rdi)
  3767. mulq 8(%rdi)
  3768. # qhasm: zq1 = mulrax
  3769. # asm 1: mov <mulrax=int64#7,>zq1=int64#5
  3770. # asm 2: mov <mulrax=%rax,>zq1=%r8
  3771. mov %rax,%r8
  3772. # qhasm: mulr11 = mulrdx
  3773. # asm 1: mov <mulrdx=int64#3,>mulr11=int64#6
  3774. # asm 2: mov <mulrdx=%rdx,>mulr11=%r9
  3775. mov %rdx,%r9
  3776. # qhasm: mulrax = *(uint64 *)(workp + 160)
  3777. # asm 1: movq 160(<workp=int64#1),>mulrax=int64#7
  3778. # asm 2: movq 160(<workp=%rdi),>mulrax=%rax
  3779. movq 160(%rdi),%rax
  3780. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 16)
  3781. # asm 1: mulq 16(<workp=int64#1)
  3782. # asm 2: mulq 16(<workp=%rdi)
  3783. mulq 16(%rdi)
  3784. # qhasm: zq2 = mulrax
  3785. # asm 1: mov <mulrax=int64#7,>zq2=int64#8
  3786. # asm 2: mov <mulrax=%rax,>zq2=%r10
  3787. mov %rax,%r10
  3788. # qhasm: mulr21 = mulrdx
  3789. # asm 1: mov <mulrdx=int64#3,>mulr21=int64#9
  3790. # asm 2: mov <mulrdx=%rdx,>mulr21=%r11
  3791. mov %rdx,%r11
  3792. # qhasm: mulrax = *(uint64 *)(workp + 160)
  3793. # asm 1: movq 160(<workp=int64#1),>mulrax=int64#7
  3794. # asm 2: movq 160(<workp=%rdi),>mulrax=%rax
  3795. movq 160(%rdi),%rax
  3796. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 24)
  3797. # asm 1: mulq 24(<workp=int64#1)
  3798. # asm 2: mulq 24(<workp=%rdi)
  3799. mulq 24(%rdi)
  3800. # qhasm: zq3 = mulrax
  3801. # asm 1: mov <mulrax=int64#7,>zq3=int64#10
  3802. # asm 2: mov <mulrax=%rax,>zq3=%r12
  3803. mov %rax,%r12
  3804. # qhasm: mulr31 = mulrdx
  3805. # asm 1: mov <mulrdx=int64#3,>mulr31=int64#11
  3806. # asm 2: mov <mulrdx=%rdx,>mulr31=%r13
  3807. mov %rdx,%r13
  3808. # qhasm: mulrax = *(uint64 *)(workp + 160)
  3809. # asm 1: movq 160(<workp=int64#1),>mulrax=int64#7
  3810. # asm 2: movq 160(<workp=%rdi),>mulrax=%rax
  3811. movq 160(%rdi),%rax
  3812. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 32)
  3813. # asm 1: mulq 32(<workp=int64#1)
  3814. # asm 2: mulq 32(<workp=%rdi)
  3815. mulq 32(%rdi)
  3816. # qhasm: zq4 = mulrax
  3817. # asm 1: mov <mulrax=int64#7,>zq4=int64#12
  3818. # asm 2: mov <mulrax=%rax,>zq4=%r14
  3819. mov %rax,%r14
  3820. # qhasm: mulr41 = mulrdx
  3821. # asm 1: mov <mulrdx=int64#3,>mulr41=int64#13
  3822. # asm 2: mov <mulrdx=%rdx,>mulr41=%r15
  3823. mov %rdx,%r15
  3824. # qhasm: mulrax = *(uint64 *)(workp + 168)
  3825. # asm 1: movq 168(<workp=int64#1),>mulrax=int64#7
  3826. # asm 2: movq 168(<workp=%rdi),>mulrax=%rax
  3827. movq 168(%rdi),%rax
  3828. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 0)
  3829. # asm 1: mulq 0(<workp=int64#1)
  3830. # asm 2: mulq 0(<workp=%rdi)
  3831. mulq 0(%rdi)
  3832. # qhasm: carry? zq1 += mulrax
  3833. # asm 1: add <mulrax=int64#7,<zq1=int64#5
  3834. # asm 2: add <mulrax=%rax,<zq1=%r8
  3835. add %rax,%r8
  3836. # qhasm: mulr11 += mulrdx + carry
  3837. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  3838. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  3839. adc %rdx,%r9
  3840. # qhasm: mulrax = *(uint64 *)(workp + 168)
  3841. # asm 1: movq 168(<workp=int64#1),>mulrax=int64#7
  3842. # asm 2: movq 168(<workp=%rdi),>mulrax=%rax
  3843. movq 168(%rdi),%rax
  3844. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 8)
  3845. # asm 1: mulq 8(<workp=int64#1)
  3846. # asm 2: mulq 8(<workp=%rdi)
  3847. mulq 8(%rdi)
  3848. # qhasm: carry? zq2 += mulrax
  3849. # asm 1: add <mulrax=int64#7,<zq2=int64#8
  3850. # asm 2: add <mulrax=%rax,<zq2=%r10
  3851. add %rax,%r10
  3852. # qhasm: mulr21 += mulrdx + carry
  3853. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  3854. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  3855. adc %rdx,%r11
  3856. # qhasm: mulrax = *(uint64 *)(workp + 168)
  3857. # asm 1: movq 168(<workp=int64#1),>mulrax=int64#7
  3858. # asm 2: movq 168(<workp=%rdi),>mulrax=%rax
  3859. movq 168(%rdi),%rax
  3860. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 16)
  3861. # asm 1: mulq 16(<workp=int64#1)
  3862. # asm 2: mulq 16(<workp=%rdi)
  3863. mulq 16(%rdi)
  3864. # qhasm: carry? zq3 += mulrax
  3865. # asm 1: add <mulrax=int64#7,<zq3=int64#10
  3866. # asm 2: add <mulrax=%rax,<zq3=%r12
  3867. add %rax,%r12
  3868. # qhasm: mulr31 += mulrdx + carry
  3869. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  3870. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  3871. adc %rdx,%r13
  3872. # qhasm: mulrax = *(uint64 *)(workp + 168)
  3873. # asm 1: movq 168(<workp=int64#1),>mulrax=int64#7
  3874. # asm 2: movq 168(<workp=%rdi),>mulrax=%rax
  3875. movq 168(%rdi),%rax
  3876. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 24)
  3877. # asm 1: mulq 24(<workp=int64#1)
  3878. # asm 2: mulq 24(<workp=%rdi)
  3879. mulq 24(%rdi)
  3880. # qhasm: carry? zq4 += mulrax
  3881. # asm 1: add <mulrax=int64#7,<zq4=int64#12
  3882. # asm 2: add <mulrax=%rax,<zq4=%r14
  3883. add %rax,%r14
  3884. # qhasm: mulr41 += mulrdx + carry
  3885. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  3886. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  3887. adc %rdx,%r15
  3888. # qhasm: mulrax = *(uint64 *)(workp + 168)
  3889. # asm 1: movq 168(<workp=int64#1),>mulrax=int64#3
  3890. # asm 2: movq 168(<workp=%rdi),>mulrax=%rdx
  3891. movq 168(%rdi),%rdx
  3892. # qhasm: mulrax *= 19
  3893. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  3894. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  3895. imulq $19,%rdx,%rax
  3896. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 32)
  3897. # asm 1: mulq 32(<workp=int64#1)
  3898. # asm 2: mulq 32(<workp=%rdi)
  3899. mulq 32(%rdi)
  3900. # qhasm: carry? zq0 += mulrax
  3901. # asm 1: add <mulrax=int64#7,<zq0=int64#2
  3902. # asm 2: add <mulrax=%rax,<zq0=%rsi
  3903. add %rax,%rsi
  3904. # qhasm: mulr01 += mulrdx + carry
  3905. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  3906. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  3907. adc %rdx,%rcx
  3908. # qhasm: mulrax = *(uint64 *)(workp + 176)
  3909. # asm 1: movq 176(<workp=int64#1),>mulrax=int64#7
  3910. # asm 2: movq 176(<workp=%rdi),>mulrax=%rax
  3911. movq 176(%rdi),%rax
  3912. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 0)
  3913. # asm 1: mulq 0(<workp=int64#1)
  3914. # asm 2: mulq 0(<workp=%rdi)
  3915. mulq 0(%rdi)
  3916. # qhasm: carry? zq2 += mulrax
  3917. # asm 1: add <mulrax=int64#7,<zq2=int64#8
  3918. # asm 2: add <mulrax=%rax,<zq2=%r10
  3919. add %rax,%r10
  3920. # qhasm: mulr21 += mulrdx + carry
  3921. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  3922. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  3923. adc %rdx,%r11
  3924. # qhasm: mulrax = *(uint64 *)(workp + 176)
  3925. # asm 1: movq 176(<workp=int64#1),>mulrax=int64#7
  3926. # asm 2: movq 176(<workp=%rdi),>mulrax=%rax
  3927. movq 176(%rdi),%rax
  3928. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 8)
  3929. # asm 1: mulq 8(<workp=int64#1)
  3930. # asm 2: mulq 8(<workp=%rdi)
  3931. mulq 8(%rdi)
  3932. # qhasm: carry? zq3 += mulrax
  3933. # asm 1: add <mulrax=int64#7,<zq3=int64#10
  3934. # asm 2: add <mulrax=%rax,<zq3=%r12
  3935. add %rax,%r12
  3936. # qhasm: mulr31 += mulrdx + carry
  3937. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  3938. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  3939. adc %rdx,%r13
  3940. # qhasm: mulrax = *(uint64 *)(workp + 176)
  3941. # asm 1: movq 176(<workp=int64#1),>mulrax=int64#7
  3942. # asm 2: movq 176(<workp=%rdi),>mulrax=%rax
  3943. movq 176(%rdi),%rax
  3944. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 16)
  3945. # asm 1: mulq 16(<workp=int64#1)
  3946. # asm 2: mulq 16(<workp=%rdi)
  3947. mulq 16(%rdi)
  3948. # qhasm: carry? zq4 += mulrax
  3949. # asm 1: add <mulrax=int64#7,<zq4=int64#12
  3950. # asm 2: add <mulrax=%rax,<zq4=%r14
  3951. add %rax,%r14
  3952. # qhasm: mulr41 += mulrdx + carry
  3953. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  3954. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  3955. adc %rdx,%r15
  3956. # qhasm: mulrax = *(uint64 *)(workp + 176)
  3957. # asm 1: movq 176(<workp=int64#1),>mulrax=int64#3
  3958. # asm 2: movq 176(<workp=%rdi),>mulrax=%rdx
  3959. movq 176(%rdi),%rdx
  3960. # qhasm: mulrax *= 19
  3961. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  3962. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  3963. imulq $19,%rdx,%rax
  3964. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 24)
  3965. # asm 1: mulq 24(<workp=int64#1)
  3966. # asm 2: mulq 24(<workp=%rdi)
  3967. mulq 24(%rdi)
  3968. # qhasm: carry? zq0 += mulrax
  3969. # asm 1: add <mulrax=int64#7,<zq0=int64#2
  3970. # asm 2: add <mulrax=%rax,<zq0=%rsi
  3971. add %rax,%rsi
  3972. # qhasm: mulr01 += mulrdx + carry
  3973. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  3974. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  3975. adc %rdx,%rcx
  3976. # qhasm: mulrax = *(uint64 *)(workp + 176)
  3977. # asm 1: movq 176(<workp=int64#1),>mulrax=int64#3
  3978. # asm 2: movq 176(<workp=%rdi),>mulrax=%rdx
  3979. movq 176(%rdi),%rdx
  3980. # qhasm: mulrax *= 19
  3981. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  3982. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  3983. imulq $19,%rdx,%rax
  3984. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 32)
  3985. # asm 1: mulq 32(<workp=int64#1)
  3986. # asm 2: mulq 32(<workp=%rdi)
  3987. mulq 32(%rdi)
  3988. # qhasm: carry? zq1 += mulrax
  3989. # asm 1: add <mulrax=int64#7,<zq1=int64#5
  3990. # asm 2: add <mulrax=%rax,<zq1=%r8
  3991. add %rax,%r8
  3992. # qhasm: mulr11 += mulrdx + carry
  3993. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  3994. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  3995. adc %rdx,%r9
  3996. # qhasm: mulrax = *(uint64 *)(workp + 184)
  3997. # asm 1: movq 184(<workp=int64#1),>mulrax=int64#7
  3998. # asm 2: movq 184(<workp=%rdi),>mulrax=%rax
  3999. movq 184(%rdi),%rax
  4000. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 0)
  4001. # asm 1: mulq 0(<workp=int64#1)
  4002. # asm 2: mulq 0(<workp=%rdi)
  4003. mulq 0(%rdi)
  4004. # qhasm: carry? zq3 += mulrax
  4005. # asm 1: add <mulrax=int64#7,<zq3=int64#10
  4006. # asm 2: add <mulrax=%rax,<zq3=%r12
  4007. add %rax,%r12
  4008. # qhasm: mulr31 += mulrdx + carry
  4009. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  4010. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  4011. adc %rdx,%r13
  4012. # qhasm: mulrax = *(uint64 *)(workp + 184)
  4013. # asm 1: movq 184(<workp=int64#1),>mulrax=int64#7
  4014. # asm 2: movq 184(<workp=%rdi),>mulrax=%rax
  4015. movq 184(%rdi),%rax
  4016. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 8)
  4017. # asm 1: mulq 8(<workp=int64#1)
  4018. # asm 2: mulq 8(<workp=%rdi)
  4019. mulq 8(%rdi)
  4020. # qhasm: carry? zq4 += mulrax
  4021. # asm 1: add <mulrax=int64#7,<zq4=int64#12
  4022. # asm 2: add <mulrax=%rax,<zq4=%r14
  4023. add %rax,%r14
  4024. # qhasm: mulr41 += mulrdx + carry
  4025. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  4026. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  4027. adc %rdx,%r15
  4028. # qhasm: mulrax = mulx319_stack
  4029. # asm 1: movq <mulx319_stack=stack64#8,>mulrax=int64#7
  4030. # asm 2: movq <mulx319_stack=56(%rsp),>mulrax=%rax
  4031. movq 56(%rsp),%rax
  4032. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 24)
  4033. # asm 1: mulq 24(<workp=int64#1)
  4034. # asm 2: mulq 24(<workp=%rdi)
  4035. mulq 24(%rdi)
  4036. # qhasm: carry? zq1 += mulrax
  4037. # asm 1: add <mulrax=int64#7,<zq1=int64#5
  4038. # asm 2: add <mulrax=%rax,<zq1=%r8
  4039. add %rax,%r8
  4040. # qhasm: mulr11 += mulrdx + carry
  4041. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  4042. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  4043. adc %rdx,%r9
  4044. # qhasm: mulrax = mulx319_stack
  4045. # asm 1: movq <mulx319_stack=stack64#8,>mulrax=int64#7
  4046. # asm 2: movq <mulx319_stack=56(%rsp),>mulrax=%rax
  4047. movq 56(%rsp),%rax
  4048. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 32)
  4049. # asm 1: mulq 32(<workp=int64#1)
  4050. # asm 2: mulq 32(<workp=%rdi)
  4051. mulq 32(%rdi)
  4052. # qhasm: carry? zq2 += mulrax
  4053. # asm 1: add <mulrax=int64#7,<zq2=int64#8
  4054. # asm 2: add <mulrax=%rax,<zq2=%r10
  4055. add %rax,%r10
  4056. # qhasm: mulr21 += mulrdx + carry
  4057. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  4058. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  4059. adc %rdx,%r11
  4060. # qhasm: mulrax = *(uint64 *)(workp + 192)
  4061. # asm 1: movq 192(<workp=int64#1),>mulrax=int64#7
  4062. # asm 2: movq 192(<workp=%rdi),>mulrax=%rax
  4063. movq 192(%rdi),%rax
  4064. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 0)
  4065. # asm 1: mulq 0(<workp=int64#1)
  4066. # asm 2: mulq 0(<workp=%rdi)
  4067. mulq 0(%rdi)
  4068. # qhasm: carry? zq4 += mulrax
  4069. # asm 1: add <mulrax=int64#7,<zq4=int64#12
  4070. # asm 2: add <mulrax=%rax,<zq4=%r14
  4071. add %rax,%r14
  4072. # qhasm: mulr41 += mulrdx + carry
  4073. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  4074. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  4075. adc %rdx,%r15
  4076. # qhasm: mulrax = mulx419_stack
  4077. # asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
  4078. # asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
  4079. movq 64(%rsp),%rax
  4080. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 16)
  4081. # asm 1: mulq 16(<workp=int64#1)
  4082. # asm 2: mulq 16(<workp=%rdi)
  4083. mulq 16(%rdi)
  4084. # qhasm: carry? zq1 += mulrax
  4085. # asm 1: add <mulrax=int64#7,<zq1=int64#5
  4086. # asm 2: add <mulrax=%rax,<zq1=%r8
  4087. add %rax,%r8
  4088. # qhasm: mulr11 += mulrdx + carry
  4089. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  4090. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  4091. adc %rdx,%r9
  4092. # qhasm: mulrax = mulx419_stack
  4093. # asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
  4094. # asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
  4095. movq 64(%rsp),%rax
  4096. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 24)
  4097. # asm 1: mulq 24(<workp=int64#1)
  4098. # asm 2: mulq 24(<workp=%rdi)
  4099. mulq 24(%rdi)
  4100. # qhasm: carry? zq2 += mulrax
  4101. # asm 1: add <mulrax=int64#7,<zq2=int64#8
  4102. # asm 2: add <mulrax=%rax,<zq2=%r10
  4103. add %rax,%r10
  4104. # qhasm: mulr21 += mulrdx + carry
  4105. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  4106. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  4107. adc %rdx,%r11
  4108. # qhasm: mulrax = mulx419_stack
  4109. # asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
  4110. # asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
  4111. movq 64(%rsp),%rax
  4112. # qhasm: (uint128) mulrdx mulrax = mulrax * *(uint64 *)(workp + 32)
  4113. # asm 1: mulq 32(<workp=int64#1)
  4114. # asm 2: mulq 32(<workp=%rdi)
  4115. mulq 32(%rdi)
  4116. # qhasm: carry? zq3 += mulrax
  4117. # asm 1: add <mulrax=int64#7,<zq3=int64#10
  4118. # asm 2: add <mulrax=%rax,<zq3=%r12
  4119. add %rax,%r12
  4120. # qhasm: mulr31 += mulrdx + carry
  4121. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  4122. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  4123. adc %rdx,%r13
  4124. # qhasm: mulredmask = *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_REDMASK51
  4125. # asm 1: movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,>mulredmask=int64#3
  4126. # asm 2: movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,>mulredmask=%rdx
  4127. movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,%rdx
  4128. # qhasm: mulr01 = (mulr01.zq0) << 13
  4129. # asm 1: shld $13,<zq0=int64#2,<mulr01=int64#4
  4130. # asm 2: shld $13,<zq0=%rsi,<mulr01=%rcx
  4131. shld $13,%rsi,%rcx
  4132. # qhasm: zq0 &= mulredmask
  4133. # asm 1: and <mulredmask=int64#3,<zq0=int64#2
  4134. # asm 2: and <mulredmask=%rdx,<zq0=%rsi
  4135. and %rdx,%rsi
  4136. # qhasm: mulr11 = (mulr11.zq1) << 13
  4137. # asm 1: shld $13,<zq1=int64#5,<mulr11=int64#6
  4138. # asm 2: shld $13,<zq1=%r8,<mulr11=%r9
  4139. shld $13,%r8,%r9
  4140. # qhasm: zq1 &= mulredmask
  4141. # asm 1: and <mulredmask=int64#3,<zq1=int64#5
  4142. # asm 2: and <mulredmask=%rdx,<zq1=%r8
  4143. and %rdx,%r8
  4144. # qhasm: zq1 += mulr01
  4145. # asm 1: add <mulr01=int64#4,<zq1=int64#5
  4146. # asm 2: add <mulr01=%rcx,<zq1=%r8
  4147. add %rcx,%r8
  4148. # qhasm: mulr21 = (mulr21.zq2) << 13
  4149. # asm 1: shld $13,<zq2=int64#8,<mulr21=int64#9
  4150. # asm 2: shld $13,<zq2=%r10,<mulr21=%r11
  4151. shld $13,%r10,%r11
  4152. # qhasm: zq2 &= mulredmask
  4153. # asm 1: and <mulredmask=int64#3,<zq2=int64#8
  4154. # asm 2: and <mulredmask=%rdx,<zq2=%r10
  4155. and %rdx,%r10
  4156. # qhasm: zq2 += mulr11
  4157. # asm 1: add <mulr11=int64#6,<zq2=int64#8
  4158. # asm 2: add <mulr11=%r9,<zq2=%r10
  4159. add %r9,%r10
  4160. # qhasm: mulr31 = (mulr31.zq3) << 13
  4161. # asm 1: shld $13,<zq3=int64#10,<mulr31=int64#11
  4162. # asm 2: shld $13,<zq3=%r12,<mulr31=%r13
  4163. shld $13,%r12,%r13
  4164. # qhasm: zq3 &= mulredmask
  4165. # asm 1: and <mulredmask=int64#3,<zq3=int64#10
  4166. # asm 2: and <mulredmask=%rdx,<zq3=%r12
  4167. and %rdx,%r12
  4168. # qhasm: zq3 += mulr21
  4169. # asm 1: add <mulr21=int64#9,<zq3=int64#10
  4170. # asm 2: add <mulr21=%r11,<zq3=%r12
  4171. add %r11,%r12
  4172. # qhasm: mulr41 = (mulr41.zq4) << 13
  4173. # asm 1: shld $13,<zq4=int64#12,<mulr41=int64#13
  4174. # asm 2: shld $13,<zq4=%r14,<mulr41=%r15
  4175. shld $13,%r14,%r15
  4176. # qhasm: zq4 &= mulredmask
  4177. # asm 1: and <mulredmask=int64#3,<zq4=int64#12
  4178. # asm 2: and <mulredmask=%rdx,<zq4=%r14
  4179. and %rdx,%r14
  4180. # qhasm: zq4 += mulr31
  4181. # asm 1: add <mulr31=int64#11,<zq4=int64#12
  4182. # asm 2: add <mulr31=%r13,<zq4=%r14
  4183. add %r13,%r14
  4184. # qhasm: mulr41 = mulr41 * 19
  4185. # asm 1: imulq $19,<mulr41=int64#13,>mulr41=int64#4
  4186. # asm 2: imulq $19,<mulr41=%r15,>mulr41=%rcx
  4187. imulq $19,%r15,%rcx
  4188. # qhasm: zq0 += mulr41
  4189. # asm 1: add <mulr41=int64#4,<zq0=int64#2
  4190. # asm 2: add <mulr41=%rcx,<zq0=%rsi
  4191. add %rcx,%rsi
  4192. # qhasm: mult = zq0
  4193. # asm 1: mov <zq0=int64#2,>mult=int64#4
  4194. # asm 2: mov <zq0=%rsi,>mult=%rcx
  4195. mov %rsi,%rcx
  4196. # qhasm: (uint64) mult >>= 51
  4197. # asm 1: shr $51,<mult=int64#4
  4198. # asm 2: shr $51,<mult=%rcx
  4199. shr $51,%rcx
  4200. # qhasm: mult += zq1
  4201. # asm 1: add <zq1=int64#5,<mult=int64#4
  4202. # asm 2: add <zq1=%r8,<mult=%rcx
  4203. add %r8,%rcx
  4204. # qhasm: zq1 = mult
  4205. # asm 1: mov <mult=int64#4,>zq1=int64#5
  4206. # asm 2: mov <mult=%rcx,>zq1=%r8
  4207. mov %rcx,%r8
  4208. # qhasm: (uint64) mult >>= 51
  4209. # asm 1: shr $51,<mult=int64#4
  4210. # asm 2: shr $51,<mult=%rcx
  4211. shr $51,%rcx
  4212. # qhasm: zq0 &= mulredmask
  4213. # asm 1: and <mulredmask=int64#3,<zq0=int64#2
  4214. # asm 2: and <mulredmask=%rdx,<zq0=%rsi
  4215. and %rdx,%rsi
  4216. # qhasm: mult += zq2
  4217. # asm 1: add <zq2=int64#8,<mult=int64#4
  4218. # asm 2: add <zq2=%r10,<mult=%rcx
  4219. add %r10,%rcx
  4220. # qhasm: zq2 = mult
  4221. # asm 1: mov <mult=int64#4,>zq2=int64#6
  4222. # asm 2: mov <mult=%rcx,>zq2=%r9
  4223. mov %rcx,%r9
  4224. # qhasm: (uint64) mult >>= 51
  4225. # asm 1: shr $51,<mult=int64#4
  4226. # asm 2: shr $51,<mult=%rcx
  4227. shr $51,%rcx
  4228. # qhasm: zq1 &= mulredmask
  4229. # asm 1: and <mulredmask=int64#3,<zq1=int64#5
  4230. # asm 2: and <mulredmask=%rdx,<zq1=%r8
  4231. and %rdx,%r8
  4232. # qhasm: mult += zq3
  4233. # asm 1: add <zq3=int64#10,<mult=int64#4
  4234. # asm 2: add <zq3=%r12,<mult=%rcx
  4235. add %r12,%rcx
  4236. # qhasm: zq3 = mult
  4237. # asm 1: mov <mult=int64#4,>zq3=int64#7
  4238. # asm 2: mov <mult=%rcx,>zq3=%rax
  4239. mov %rcx,%rax
  4240. # qhasm: (uint64) mult >>= 51
  4241. # asm 1: shr $51,<mult=int64#4
  4242. # asm 2: shr $51,<mult=%rcx
  4243. shr $51,%rcx
  4244. # qhasm: zq2 &= mulredmask
  4245. # asm 1: and <mulredmask=int64#3,<zq2=int64#6
  4246. # asm 2: and <mulredmask=%rdx,<zq2=%r9
  4247. and %rdx,%r9
  4248. # qhasm: mult += zq4
  4249. # asm 1: add <zq4=int64#12,<mult=int64#4
  4250. # asm 2: add <zq4=%r14,<mult=%rcx
  4251. add %r14,%rcx
  4252. # qhasm: zq4 = mult
  4253. # asm 1: mov <mult=int64#4,>zq4=int64#8
  4254. # asm 2: mov <mult=%rcx,>zq4=%r10
  4255. mov %rcx,%r10
  4256. # qhasm: (uint64) mult >>= 51
  4257. # asm 1: shr $51,<mult=int64#4
  4258. # asm 2: shr $51,<mult=%rcx
  4259. shr $51,%rcx
  4260. # qhasm: zq3 &= mulredmask
  4261. # asm 1: and <mulredmask=int64#3,<zq3=int64#7
  4262. # asm 2: and <mulredmask=%rdx,<zq3=%rax
  4263. and %rdx,%rax
  4264. # qhasm: mult *= 19
  4265. # asm 1: imulq $19,<mult=int64#4,>mult=int64#4
  4266. # asm 2: imulq $19,<mult=%rcx,>mult=%rcx
  4267. imulq $19,%rcx,%rcx
  4268. # qhasm: zq0 += mult
  4269. # asm 1: add <mult=int64#4,<zq0=int64#2
  4270. # asm 2: add <mult=%rcx,<zq0=%rsi
  4271. add %rcx,%rsi
  4272. # qhasm: zq4 &= mulredmask
  4273. # asm 1: and <mulredmask=int64#3,<zq4=int64#8
  4274. # asm 2: and <mulredmask=%rdx,<zq4=%r10
  4275. and %rdx,%r10
  4276. # qhasm: *(uint64 *)(workp + 160) = zq0
  4277. # asm 1: movq <zq0=int64#2,160(<workp=int64#1)
  4278. # asm 2: movq <zq0=%rsi,160(<workp=%rdi)
  4279. movq %rsi,160(%rdi)
  4280. # qhasm: *(uint64 *)(workp + 168) = zq1
  4281. # asm 1: movq <zq1=int64#5,168(<workp=int64#1)
  4282. # asm 2: movq <zq1=%r8,168(<workp=%rdi)
  4283. movq %r8,168(%rdi)
  4284. # qhasm: *(uint64 *)(workp + 176) = zq2
  4285. # asm 1: movq <zq2=int64#6,176(<workp=int64#1)
  4286. # asm 2: movq <zq2=%r9,176(<workp=%rdi)
  4287. movq %r9,176(%rdi)
  4288. # qhasm: *(uint64 *)(workp + 184) = zq3
  4289. # asm 1: movq <zq3=int64#7,184(<workp=int64#1)
  4290. # asm 2: movq <zq3=%rax,184(<workp=%rdi)
  4291. movq %rax,184(%rdi)
  4292. # qhasm: *(uint64 *)(workp + 192) = zq4
  4293. # asm 1: movq <zq4=int64#8,192(<workp=int64#1)
  4294. # asm 2: movq <zq4=%r10,192(<workp=%rdi)
  4295. movq %r10,192(%rdi)
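# ---------------------------------------------------------------------------
# Editorial note (not generated output): the block above multiplies the
# element just squared (limbs a0..a4 at workp+160..192, names used only in
# this note) by the element at workp+0..32 (limbs b0..b4; presumably the
# base-point x-coordinate, which would make this the z3 = x1*(DA-CB)^2 step
# of the Montgomery ladder), and writes the reduced product back to
# workp+160..192.  It is the same 5x5 schoolbook pattern: 19*a3 and 19*a4 are
# precomputed once and spilled to mulx319_stack / mulx419_stack, and column k
# collects every a_i*b_j with i + j = k or i + j = k + 5, the latter with a
# factor 19.  For example,
#     zq1 = a0*b1 + a1*b0 + 19*(a2*b4 + a3*b3 + a4*b2)
# before the usual shld/mask/carry reduction.  The block that follows repeats
# the pattern with both operands held on the stack (t60..t64 and t70..t74),
# accumulating into xp0..xp4.
# ---------------------------------------------------------------------------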
  4296. # qhasm: mulrax = t63_stack
  4297. # asm 1: movq <t63_stack=stack64#26,>mulrax=int64#2
  4298. # asm 2: movq <t63_stack=200(%rsp),>mulrax=%rsi
  4299. movq 200(%rsp),%rsi
  4300. # qhasm: mulrax *= 19
  4301. # asm 1: imulq $19,<mulrax=int64#2,>mulrax=int64#7
  4302. # asm 2: imulq $19,<mulrax=%rsi,>mulrax=%rax
  4303. imulq $19,%rsi,%rax
  4304. # qhasm: mulx319_stack = mulrax
  4305. # asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#8
  4306. # asm 2: movq <mulrax=%rax,>mulx319_stack=56(%rsp)
  4307. movq %rax,56(%rsp)
  4308. # qhasm: (uint128) mulrdx mulrax = mulrax * t72_stack
  4309. # asm 1: mulq <t72_stack=stack64#20
  4310. # asm 2: mulq <t72_stack=152(%rsp)
  4311. mulq 152(%rsp)
  4312. # qhasm: xp0 = mulrax
  4313. # asm 1: mov <mulrax=int64#7,>xp0=int64#2
  4314. # asm 2: mov <mulrax=%rax,>xp0=%rsi
  4315. mov %rax,%rsi
  4316. # qhasm: mulr01 = mulrdx
  4317. # asm 1: mov <mulrdx=int64#3,>mulr01=int64#4
  4318. # asm 2: mov <mulrdx=%rdx,>mulr01=%rcx
  4319. mov %rdx,%rcx
  4320. # qhasm: mulrax = t64_stack
  4321. # asm 1: movq <t64_stack=stack64#27,>mulrax=int64#3
  4322. # asm 2: movq <t64_stack=208(%rsp),>mulrax=%rdx
  4323. movq 208(%rsp),%rdx
  4324. # qhasm: mulrax *= 19
  4325. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  4326. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  4327. imulq $19,%rdx,%rax
  4328. # qhasm: mulx419_stack = mulrax
  4329. # asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#9
  4330. # asm 2: movq <mulrax=%rax,>mulx419_stack=64(%rsp)
  4331. movq %rax,64(%rsp)
  4332. # qhasm: (uint128) mulrdx mulrax = mulrax * t71_stack
  4333. # asm 1: mulq <t71_stack=stack64#19
  4334. # asm 2: mulq <t71_stack=144(%rsp)
  4335. mulq 144(%rsp)
  4336. # qhasm: carry? xp0 += mulrax
  4337. # asm 1: add <mulrax=int64#7,<xp0=int64#2
  4338. # asm 2: add <mulrax=%rax,<xp0=%rsi
  4339. add %rax,%rsi
  4340. # qhasm: mulr01 += mulrdx + carry
  4341. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  4342. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  4343. adc %rdx,%rcx
  4344. # qhasm: mulrax = t60_stack
  4345. # asm 1: movq <t60_stack=stack64#23,>mulrax=int64#7
  4346. # asm 2: movq <t60_stack=176(%rsp),>mulrax=%rax
  4347. movq 176(%rsp),%rax
  4348. # qhasm: (uint128) mulrdx mulrax = mulrax * t70_stack
  4349. # asm 1: mulq <t70_stack=stack64#18
  4350. # asm 2: mulq <t70_stack=136(%rsp)
  4351. mulq 136(%rsp)
  4352. # qhasm: carry? xp0 += mulrax
  4353. # asm 1: add <mulrax=int64#7,<xp0=int64#2
  4354. # asm 2: add <mulrax=%rax,<xp0=%rsi
  4355. add %rax,%rsi
  4356. # qhasm: mulr01 += mulrdx + carry
  4357. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  4358. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  4359. adc %rdx,%rcx
  4360. # qhasm: mulrax = t60_stack
  4361. # asm 1: movq <t60_stack=stack64#23,>mulrax=int64#7
  4362. # asm 2: movq <t60_stack=176(%rsp),>mulrax=%rax
  4363. movq 176(%rsp),%rax
  4364. # qhasm: (uint128) mulrdx mulrax = mulrax * t71_stack
  4365. # asm 1: mulq <t71_stack=stack64#19
  4366. # asm 2: mulq <t71_stack=144(%rsp)
  4367. mulq 144(%rsp)
  4368. # qhasm: xp1 = mulrax
  4369. # asm 1: mov <mulrax=int64#7,>xp1=int64#5
  4370. # asm 2: mov <mulrax=%rax,>xp1=%r8
  4371. mov %rax,%r8
  4372. # qhasm: mulr11 = mulrdx
  4373. # asm 1: mov <mulrdx=int64#3,>mulr11=int64#6
  4374. # asm 2: mov <mulrdx=%rdx,>mulr11=%r9
  4375. mov %rdx,%r9
  4376. # qhasm: mulrax = t60_stack
  4377. # asm 1: movq <t60_stack=stack64#23,>mulrax=int64#7
  4378. # asm 2: movq <t60_stack=176(%rsp),>mulrax=%rax
  4379. movq 176(%rsp),%rax
  4380. # qhasm: (uint128) mulrdx mulrax = mulrax * t72_stack
  4381. # asm 1: mulq <t72_stack=stack64#20
  4382. # asm 2: mulq <t72_stack=152(%rsp)
  4383. mulq 152(%rsp)
  4384. # qhasm: xp2 = mulrax
  4385. # asm 1: mov <mulrax=int64#7,>xp2=int64#8
  4386. # asm 2: mov <mulrax=%rax,>xp2=%r10
  4387. mov %rax,%r10
  4388. # qhasm: mulr21 = mulrdx
  4389. # asm 1: mov <mulrdx=int64#3,>mulr21=int64#9
  4390. # asm 2: mov <mulrdx=%rdx,>mulr21=%r11
  4391. mov %rdx,%r11
  4392. # qhasm: mulrax = t60_stack
  4393. # asm 1: movq <t60_stack=stack64#23,>mulrax=int64#7
  4394. # asm 2: movq <t60_stack=176(%rsp),>mulrax=%rax
  4395. movq 176(%rsp),%rax
  4396. # qhasm: (uint128) mulrdx mulrax = mulrax * t73_stack
  4397. # asm 1: mulq <t73_stack=stack64#21
  4398. # asm 2: mulq <t73_stack=160(%rsp)
  4399. mulq 160(%rsp)
  4400. # qhasm: xp3 = mulrax
  4401. # asm 1: mov <mulrax=int64#7,>xp3=int64#10
  4402. # asm 2: mov <mulrax=%rax,>xp3=%r12
  4403. mov %rax,%r12
  4404. # qhasm: mulr31 = mulrdx
  4405. # asm 1: mov <mulrdx=int64#3,>mulr31=int64#11
  4406. # asm 2: mov <mulrdx=%rdx,>mulr31=%r13
  4407. mov %rdx,%r13
  4408. # qhasm: mulrax = t60_stack
  4409. # asm 1: movq <t60_stack=stack64#23,>mulrax=int64#7
  4410. # asm 2: movq <t60_stack=176(%rsp),>mulrax=%rax
  4411. movq 176(%rsp),%rax
  4412. # qhasm: (uint128) mulrdx mulrax = mulrax * t74_stack
  4413. # asm 1: mulq <t74_stack=stack64#22
  4414. # asm 2: mulq <t74_stack=168(%rsp)
  4415. mulq 168(%rsp)
  4416. # qhasm: xp4 = mulrax
  4417. # asm 1: mov <mulrax=int64#7,>xp4=int64#12
  4418. # asm 2: mov <mulrax=%rax,>xp4=%r14
  4419. mov %rax,%r14
  4420. # qhasm: mulr41 = mulrdx
  4421. # asm 1: mov <mulrdx=int64#3,>mulr41=int64#13
  4422. # asm 2: mov <mulrdx=%rdx,>mulr41=%r15
  4423. mov %rdx,%r15
  4424. # qhasm: mulrax = t61_stack
  4425. # asm 1: movq <t61_stack=stack64#24,>mulrax=int64#7
  4426. # asm 2: movq <t61_stack=184(%rsp),>mulrax=%rax
  4427. movq 184(%rsp),%rax
  4428. # qhasm: (uint128) mulrdx mulrax = mulrax * t70_stack
  4429. # asm 1: mulq <t70_stack=stack64#18
  4430. # asm 2: mulq <t70_stack=136(%rsp)
  4431. mulq 136(%rsp)
  4432. # qhasm: carry? xp1 += mulrax
  4433. # asm 1: add <mulrax=int64#7,<xp1=int64#5
  4434. # asm 2: add <mulrax=%rax,<xp1=%r8
  4435. add %rax,%r8
  4436. # qhasm: mulr11 += mulrdx + carry
  4437. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  4438. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  4439. adc %rdx,%r9
  4440. # qhasm: mulrax = t61_stack
  4441. # asm 1: movq <t61_stack=stack64#24,>mulrax=int64#7
  4442. # asm 2: movq <t61_stack=184(%rsp),>mulrax=%rax
  4443. movq 184(%rsp),%rax
  4444. # qhasm: (uint128) mulrdx mulrax = mulrax * t71_stack
  4445. # asm 1: mulq <t71_stack=stack64#19
  4446. # asm 2: mulq <t71_stack=144(%rsp)
  4447. mulq 144(%rsp)
  4448. # qhasm: carry? xp2 += mulrax
  4449. # asm 1: add <mulrax=int64#7,<xp2=int64#8
  4450. # asm 2: add <mulrax=%rax,<xp2=%r10
  4451. add %rax,%r10
  4452. # qhasm: mulr21 += mulrdx + carry
  4453. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  4454. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  4455. adc %rdx,%r11
  4456. # qhasm: mulrax = t61_stack
  4457. # asm 1: movq <t61_stack=stack64#24,>mulrax=int64#7
  4458. # asm 2: movq <t61_stack=184(%rsp),>mulrax=%rax
  4459. movq 184(%rsp),%rax
  4460. # qhasm: (uint128) mulrdx mulrax = mulrax * t72_stack
  4461. # asm 1: mulq <t72_stack=stack64#20
  4462. # asm 2: mulq <t72_stack=152(%rsp)
  4463. mulq 152(%rsp)
  4464. # qhasm: carry? xp3 += mulrax
  4465. # asm 1: add <mulrax=int64#7,<xp3=int64#10
  4466. # asm 2: add <mulrax=%rax,<xp3=%r12
  4467. add %rax,%r12
  4468. # qhasm: mulr31 += mulrdx + carry
  4469. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  4470. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  4471. adc %rdx,%r13
  4472. # qhasm: mulrax = t61_stack
  4473. # asm 1: movq <t61_stack=stack64#24,>mulrax=int64#7
  4474. # asm 2: movq <t61_stack=184(%rsp),>mulrax=%rax
  4475. movq 184(%rsp),%rax
  4476. # qhasm: (uint128) mulrdx mulrax = mulrax * t73_stack
  4477. # asm 1: mulq <t73_stack=stack64#21
  4478. # asm 2: mulq <t73_stack=160(%rsp)
  4479. mulq 160(%rsp)
  4480. # qhasm: carry? xp4 += mulrax
  4481. # asm 1: add <mulrax=int64#7,<xp4=int64#12
  4482. # asm 2: add <mulrax=%rax,<xp4=%r14
  4483. add %rax,%r14
  4484. # qhasm: mulr41 += mulrdx + carry
  4485. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  4486. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  4487. adc %rdx,%r15
  4488. # qhasm: mulrax = t61_stack
  4489. # asm 1: movq <t61_stack=stack64#24,>mulrax=int64#3
  4490. # asm 2: movq <t61_stack=184(%rsp),>mulrax=%rdx
  4491. movq 184(%rsp),%rdx
  4492. # qhasm: mulrax *= 19
  4493. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  4494. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  4495. imulq $19,%rdx,%rax
  4496. # qhasm: (uint128) mulrdx mulrax = mulrax * t74_stack
  4497. # asm 1: mulq <t74_stack=stack64#22
  4498. # asm 2: mulq <t74_stack=168(%rsp)
  4499. mulq 168(%rsp)
  4500. # qhasm: carry? xp0 += mulrax
  4501. # asm 1: add <mulrax=int64#7,<xp0=int64#2
  4502. # asm 2: add <mulrax=%rax,<xp0=%rsi
  4503. add %rax,%rsi
  4504. # qhasm: mulr01 += mulrdx + carry
  4505. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  4506. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  4507. adc %rdx,%rcx
  4508. # qhasm: mulrax = t62_stack
  4509. # asm 1: movq <t62_stack=stack64#25,>mulrax=int64#7
  4510. # asm 2: movq <t62_stack=192(%rsp),>mulrax=%rax
  4511. movq 192(%rsp),%rax
  4512. # qhasm: (uint128) mulrdx mulrax = mulrax * t70_stack
  4513. # asm 1: mulq <t70_stack=stack64#18
  4514. # asm 2: mulq <t70_stack=136(%rsp)
  4515. mulq 136(%rsp)
  4516. # qhasm: carry? xp2 += mulrax
  4517. # asm 1: add <mulrax=int64#7,<xp2=int64#8
  4518. # asm 2: add <mulrax=%rax,<xp2=%r10
  4519. add %rax,%r10
  4520. # qhasm: mulr21 += mulrdx + carry
  4521. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  4522. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  4523. adc %rdx,%r11
  4524. # qhasm: mulrax = t62_stack
  4525. # asm 1: movq <t62_stack=stack64#25,>mulrax=int64#7
  4526. # asm 2: movq <t62_stack=192(%rsp),>mulrax=%rax
  4527. movq 192(%rsp),%rax
  4528. # qhasm: (uint128) mulrdx mulrax = mulrax * t71_stack
  4529. # asm 1: mulq <t71_stack=stack64#19
  4530. # asm 2: mulq <t71_stack=144(%rsp)
  4531. mulq 144(%rsp)
  4532. # qhasm: carry? xp3 += mulrax
  4533. # asm 1: add <mulrax=int64#7,<xp3=int64#10
  4534. # asm 2: add <mulrax=%rax,<xp3=%r12
  4535. add %rax,%r12
  4536. # qhasm: mulr31 += mulrdx + carry
  4537. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  4538. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  4539. adc %rdx,%r13
  4540. # qhasm: mulrax = t62_stack
  4541. # asm 1: movq <t62_stack=stack64#25,>mulrax=int64#7
  4542. # asm 2: movq <t62_stack=192(%rsp),>mulrax=%rax
  4543. movq 192(%rsp),%rax
  4544. # qhasm: (uint128) mulrdx mulrax = mulrax * t72_stack
  4545. # asm 1: mulq <t72_stack=stack64#20
  4546. # asm 2: mulq <t72_stack=152(%rsp)
  4547. mulq 152(%rsp)
  4548. # qhasm: carry? xp4 += mulrax
  4549. # asm 1: add <mulrax=int64#7,<xp4=int64#12
  4550. # asm 2: add <mulrax=%rax,<xp4=%r14
  4551. add %rax,%r14
  4552. # qhasm: mulr41 += mulrdx + carry
  4553. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  4554. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  4555. adc %rdx,%r15
  4556. # qhasm: mulrax = t62_stack
  4557. # asm 1: movq <t62_stack=stack64#25,>mulrax=int64#3
  4558. # asm 2: movq <t62_stack=192(%rsp),>mulrax=%rdx
  4559. movq 192(%rsp),%rdx
  4560. # qhasm: mulrax *= 19
  4561. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  4562. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  4563. imulq $19,%rdx,%rax
  4564. # qhasm: (uint128) mulrdx mulrax = mulrax * t73_stack
  4565. # asm 1: mulq <t73_stack=stack64#21
  4566. # asm 2: mulq <t73_stack=160(%rsp)
  4567. mulq 160(%rsp)
  4568. # qhasm: carry? xp0 += mulrax
  4569. # asm 1: add <mulrax=int64#7,<xp0=int64#2
  4570. # asm 2: add <mulrax=%rax,<xp0=%rsi
  4571. add %rax,%rsi
  4572. # qhasm: mulr01 += mulrdx + carry
  4573. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  4574. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  4575. adc %rdx,%rcx
  4576. # qhasm: mulrax = t62_stack
  4577. # asm 1: movq <t62_stack=stack64#25,>mulrax=int64#3
  4578. # asm 2: movq <t62_stack=192(%rsp),>mulrax=%rdx
  4579. movq 192(%rsp),%rdx
  4580. # qhasm: mulrax *= 19
  4581. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  4582. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  4583. imulq $19,%rdx,%rax
  4584. # qhasm: (uint128) mulrdx mulrax = mulrax * t74_stack
  4585. # asm 1: mulq <t74_stack=stack64#22
  4586. # asm 2: mulq <t74_stack=168(%rsp)
  4587. mulq 168(%rsp)
  4588. # qhasm: carry? xp1 += mulrax
  4589. # asm 1: add <mulrax=int64#7,<xp1=int64#5
  4590. # asm 2: add <mulrax=%rax,<xp1=%r8
  4591. add %rax,%r8
  4592. # qhasm: mulr11 += mulrdx + carry
  4593. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  4594. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  4595. adc %rdx,%r9
  4596. # qhasm: mulrax = t63_stack
  4597. # asm 1: movq <t63_stack=stack64#26,>mulrax=int64#7
  4598. # asm 2: movq <t63_stack=200(%rsp),>mulrax=%rax
  4599. movq 200(%rsp),%rax
  4600. # qhasm: (uint128) mulrdx mulrax = mulrax * t70_stack
  4601. # asm 1: mulq <t70_stack=stack64#18
  4602. # asm 2: mulq <t70_stack=136(%rsp)
  4603. mulq 136(%rsp)
  4604. # qhasm: carry? xp3 += mulrax
  4605. # asm 1: add <mulrax=int64#7,<xp3=int64#10
  4606. # asm 2: add <mulrax=%rax,<xp3=%r12
  4607. add %rax,%r12
  4608. # qhasm: mulr31 += mulrdx + carry
  4609. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  4610. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  4611. adc %rdx,%r13
  4612. # qhasm: mulrax = t63_stack
  4613. # asm 1: movq <t63_stack=stack64#26,>mulrax=int64#7
  4614. # asm 2: movq <t63_stack=200(%rsp),>mulrax=%rax
  4615. movq 200(%rsp),%rax
  4616. # qhasm: (uint128) mulrdx mulrax = mulrax * t71_stack
  4617. # asm 1: mulq <t71_stack=stack64#19
  4618. # asm 2: mulq <t71_stack=144(%rsp)
  4619. mulq 144(%rsp)
  4620. # qhasm: carry? xp4 += mulrax
  4621. # asm 1: add <mulrax=int64#7,<xp4=int64#12
  4622. # asm 2: add <mulrax=%rax,<xp4=%r14
  4623. add %rax,%r14
  4624. # qhasm: mulr41 += mulrdx + carry
  4625. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  4626. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  4627. adc %rdx,%r15
  4628. # qhasm: mulrax = mulx319_stack
  4629. # asm 1: movq <mulx319_stack=stack64#8,>mulrax=int64#7
  4630. # asm 2: movq <mulx319_stack=56(%rsp),>mulrax=%rax
  4631. movq 56(%rsp),%rax
  4632. # qhasm: (uint128) mulrdx mulrax = mulrax * t73_stack
  4633. # asm 1: mulq <t73_stack=stack64#21
  4634. # asm 2: mulq <t73_stack=160(%rsp)
  4635. mulq 160(%rsp)
  4636. # qhasm: carry? xp1 += mulrax
  4637. # asm 1: add <mulrax=int64#7,<xp1=int64#5
  4638. # asm 2: add <mulrax=%rax,<xp1=%r8
  4639. add %rax,%r8
  4640. # qhasm: mulr11 += mulrdx + carry
  4641. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  4642. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  4643. adc %rdx,%r9
  4644. # qhasm: mulrax = mulx319_stack
  4645. # asm 1: movq <mulx319_stack=stack64#8,>mulrax=int64#7
  4646. # asm 2: movq <mulx319_stack=56(%rsp),>mulrax=%rax
  4647. movq 56(%rsp),%rax
  4648. # qhasm: (uint128) mulrdx mulrax = mulrax * t74_stack
  4649. # asm 1: mulq <t74_stack=stack64#22
  4650. # asm 2: mulq <t74_stack=168(%rsp)
  4651. mulq 168(%rsp)
  4652. # qhasm: carry? xp2 += mulrax
  4653. # asm 1: add <mulrax=int64#7,<xp2=int64#8
  4654. # asm 2: add <mulrax=%rax,<xp2=%r10
  4655. add %rax,%r10
  4656. # qhasm: mulr21 += mulrdx + carry
  4657. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  4658. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  4659. adc %rdx,%r11
  4660. # qhasm: mulrax = t64_stack
  4661. # asm 1: movq <t64_stack=stack64#27,>mulrax=int64#7
  4662. # asm 2: movq <t64_stack=208(%rsp),>mulrax=%rax
  4663. movq 208(%rsp),%rax
  4664. # qhasm: (uint128) mulrdx mulrax = mulrax * t70_stack
  4665. # asm 1: mulq <t70_stack=stack64#18
  4666. # asm 2: mulq <t70_stack=136(%rsp)
  4667. mulq 136(%rsp)
  4668. # qhasm: carry? xp4 += mulrax
  4669. # asm 1: add <mulrax=int64#7,<xp4=int64#12
  4670. # asm 2: add <mulrax=%rax,<xp4=%r14
  4671. add %rax,%r14
  4672. # qhasm: mulr41 += mulrdx + carry
  4673. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  4674. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  4675. adc %rdx,%r15
  4676. # qhasm: mulrax = mulx419_stack
  4677. # asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
  4678. # asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
  4679. movq 64(%rsp),%rax
  4680. # qhasm: (uint128) mulrdx mulrax = mulrax * t72_stack
  4681. # asm 1: mulq <t72_stack=stack64#20
  4682. # asm 2: mulq <t72_stack=152(%rsp)
  4683. mulq 152(%rsp)
  4684. # qhasm: carry? xp1 += mulrax
  4685. # asm 1: add <mulrax=int64#7,<xp1=int64#5
  4686. # asm 2: add <mulrax=%rax,<xp1=%r8
  4687. add %rax,%r8
  4688. # qhasm: mulr11 += mulrdx + carry
  4689. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  4690. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  4691. adc %rdx,%r9
  4692. # qhasm: mulrax = mulx419_stack
  4693. # asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
  4694. # asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
  4695. movq 64(%rsp),%rax
  4696. # qhasm: (uint128) mulrdx mulrax = mulrax * t73_stack
  4697. # asm 1: mulq <t73_stack=stack64#21
  4698. # asm 2: mulq <t73_stack=160(%rsp)
  4699. mulq 160(%rsp)
  4700. # qhasm: carry? xp2 += mulrax
  4701. # asm 1: add <mulrax=int64#7,<xp2=int64#8
  4702. # asm 2: add <mulrax=%rax,<xp2=%r10
  4703. add %rax,%r10
  4704. # qhasm: mulr21 += mulrdx + carry
  4705. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  4706. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  4707. adc %rdx,%r11
  4708. # qhasm: mulrax = mulx419_stack
  4709. # asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
  4710. # asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
  4711. movq 64(%rsp),%rax
  4712. # qhasm: (uint128) mulrdx mulrax = mulrax * t74_stack
  4713. # asm 1: mulq <t74_stack=stack64#22
  4714. # asm 2: mulq <t74_stack=168(%rsp)
  4715. mulq 168(%rsp)
  4716. # qhasm: carry? xp3 += mulrax
  4717. # asm 1: add <mulrax=int64#7,<xp3=int64#10
  4718. # asm 2: add <mulrax=%rax,<xp3=%r12
  4719. add %rax,%r12
  4720. # qhasm: mulr31 += mulrdx + carry
  4721. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  4722. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  4723. adc %rdx,%r13
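# ---------------------------------------------------------------------------
# Editorial note (not generated output): the accumulation ending here is the
# corresponding 5x5 product of the stack-held temporaries t60..t64
# (176..208(%rsp)) and t70..t74 (136..168(%rsp)); its column 0, for instance,
# is
#     xp0 = t60*t70 + 19*(t61*t74 + t62*t73 + t63*t72 + t64*t71),
# and the reduction that follows is identical in structure to the two above.
# ---------------------------------------------------------------------------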
  4724. # qhasm: mulredmask = *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_REDMASK51
  4725. # asm 1: movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,>mulredmask=int64#3
  4726. # asm 2: movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,>mulredmask=%rdx
  4727. movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,%rdx
  4728. # qhasm: mulr01 = (mulr01.xp0) << 13
  4729. # asm 1: shld $13,<xp0=int64#2,<mulr01=int64#4
  4730. # asm 2: shld $13,<xp0=%rsi,<mulr01=%rcx
  4731. shld $13,%rsi,%rcx
  4732. # qhasm: xp0 &= mulredmask
  4733. # asm 1: and <mulredmask=int64#3,<xp0=int64#2
  4734. # asm 2: and <mulredmask=%rdx,<xp0=%rsi
  4735. and %rdx,%rsi
  4736. # qhasm: mulr11 = (mulr11.xp1) << 13
  4737. # asm 1: shld $13,<xp1=int64#5,<mulr11=int64#6
  4738. # asm 2: shld $13,<xp1=%r8,<mulr11=%r9
  4739. shld $13,%r8,%r9
  4740. # qhasm: xp1 &= mulredmask
  4741. # asm 1: and <mulredmask=int64#3,<xp1=int64#5
  4742. # asm 2: and <mulredmask=%rdx,<xp1=%r8
  4743. and %rdx,%r8
  4744. # qhasm: xp1 += mulr01
  4745. # asm 1: add <mulr01=int64#4,<xp1=int64#5
  4746. # asm 2: add <mulr01=%rcx,<xp1=%r8
  4747. add %rcx,%r8
  4748. # qhasm: mulr21 = (mulr21.xp2) << 13
  4749. # asm 1: shld $13,<xp2=int64#8,<mulr21=int64#9
  4750. # asm 2: shld $13,<xp2=%r10,<mulr21=%r11
  4751. shld $13,%r10,%r11
  4752. # qhasm: xp2 &= mulredmask
  4753. # asm 1: and <mulredmask=int64#3,<xp2=int64#8
  4754. # asm 2: and <mulredmask=%rdx,<xp2=%r10
  4755. and %rdx,%r10
  4756. # qhasm: xp2 += mulr11
  4757. # asm 1: add <mulr11=int64#6,<xp2=int64#8
  4758. # asm 2: add <mulr11=%r9,<xp2=%r10
  4759. add %r9,%r10
  4760. # qhasm: mulr31 = (mulr31.xp3) << 13
  4761. # asm 1: shld $13,<xp3=int64#10,<mulr31=int64#11
  4762. # asm 2: shld $13,<xp3=%r12,<mulr31=%r13
  4763. shld $13,%r12,%r13
  4764. # qhasm: xp3 &= mulredmask
  4765. # asm 1: and <mulredmask=int64#3,<xp3=int64#10
  4766. # asm 2: and <mulredmask=%rdx,<xp3=%r12
  4767. and %rdx,%r12
  4768. # qhasm: xp3 += mulr21
  4769. # asm 1: add <mulr21=int64#9,<xp3=int64#10
  4770. # asm 2: add <mulr21=%r11,<xp3=%r12
  4771. add %r11,%r12
  4772. # qhasm: mulr41 = (mulr41.xp4) << 13
  4773. # asm 1: shld $13,<xp4=int64#12,<mulr41=int64#13
  4774. # asm 2: shld $13,<xp4=%r14,<mulr41=%r15
  4775. shld $13,%r14,%r15
  4776. # qhasm: xp4 &= mulredmask
  4777. # asm 1: and <mulredmask=int64#3,<xp4=int64#12
  4778. # asm 2: and <mulredmask=%rdx,<xp4=%r14
  4779. and %rdx,%r14
  4780. # qhasm: xp4 += mulr31
  4781. # asm 1: add <mulr31=int64#11,<xp4=int64#12
  4782. # asm 2: add <mulr31=%r13,<xp4=%r14
  4783. add %r13,%r14
  4784. # qhasm: mulr41 = mulr41 * 19
  4785. # asm 1: imulq $19,<mulr41=int64#13,>mulr41=int64#4
  4786. # asm 2: imulq $19,<mulr41=%r15,>mulr41=%rcx
  4787. imulq $19,%r15,%rcx
  4788. # qhasm: xp0 += mulr41
  4789. # asm 1: add <mulr41=int64#4,<xp0=int64#2
  4790. # asm 2: add <mulr41=%rcx,<xp0=%rsi
  4791. add %rcx,%rsi
  4792. # qhasm: mult = xp0
  4793. # asm 1: mov <xp0=int64#2,>mult=int64#4
  4794. # asm 2: mov <xp0=%rsi,>mult=%rcx
  4795. mov %rsi,%rcx
  4796. # qhasm: (uint64) mult >>= 51
  4797. # asm 1: shr $51,<mult=int64#4
  4798. # asm 2: shr $51,<mult=%rcx
  4799. shr $51,%rcx
  4800. # qhasm: mult += xp1
  4801. # asm 1: add <xp1=int64#5,<mult=int64#4
  4802. # asm 2: add <xp1=%r8,<mult=%rcx
  4803. add %r8,%rcx
  4804. # qhasm: xp1 = mult
  4805. # asm 1: mov <mult=int64#4,>xp1=int64#5
  4806. # asm 2: mov <mult=%rcx,>xp1=%r8
  4807. mov %rcx,%r8
  4808. # qhasm: (uint64) mult >>= 51
  4809. # asm 1: shr $51,<mult=int64#4
  4810. # asm 2: shr $51,<mult=%rcx
  4811. shr $51,%rcx
  4812. # qhasm: xp0 &= mulredmask
  4813. # asm 1: and <mulredmask=int64#3,<xp0=int64#2
  4814. # asm 2: and <mulredmask=%rdx,<xp0=%rsi
  4815. and %rdx,%rsi
  4816. # qhasm: mult += xp2
  4817. # asm 1: add <xp2=int64#8,<mult=int64#4
  4818. # asm 2: add <xp2=%r10,<mult=%rcx
  4819. add %r10,%rcx
  4820. # qhasm: xp2 = mult
  4821. # asm 1: mov <mult=int64#4,>xp2=int64#6
  4822. # asm 2: mov <mult=%rcx,>xp2=%r9
  4823. mov %rcx,%r9
  4824. # qhasm: (uint64) mult >>= 51
  4825. # asm 1: shr $51,<mult=int64#4
  4826. # asm 2: shr $51,<mult=%rcx
  4827. shr $51,%rcx
  4828. # qhasm: xp1 &= mulredmask
  4829. # asm 1: and <mulredmask=int64#3,<xp1=int64#5
  4830. # asm 2: and <mulredmask=%rdx,<xp1=%r8
  4831. and %rdx,%r8
  4832. # qhasm: mult += xp3
  4833. # asm 1: add <xp3=int64#10,<mult=int64#4
  4834. # asm 2: add <xp3=%r12,<mult=%rcx
  4835. add %r12,%rcx
  4836. # qhasm: xp3 = mult
  4837. # asm 1: mov <mult=int64#4,>xp3=int64#7
  4838. # asm 2: mov <mult=%rcx,>xp3=%rax
  4839. mov %rcx,%rax
  4840. # qhasm: (uint64) mult >>= 51
  4841. # asm 1: shr $51,<mult=int64#4
  4842. # asm 2: shr $51,<mult=%rcx
  4843. shr $51,%rcx
  4844. # qhasm: xp2 &= mulredmask
  4845. # asm 1: and <mulredmask=int64#3,<xp2=int64#6
  4846. # asm 2: and <mulredmask=%rdx,<xp2=%r9
  4847. and %rdx,%r9
  4848. # qhasm: mult += xp4
  4849. # asm 1: add <xp4=int64#12,<mult=int64#4
  4850. # asm 2: add <xp4=%r14,<mult=%rcx
  4851. add %r14,%rcx
  4852. # qhasm: xp4 = mult
  4853. # asm 1: mov <mult=int64#4,>xp4=int64#8
  4854. # asm 2: mov <mult=%rcx,>xp4=%r10
  4855. mov %rcx,%r10
  4856. # qhasm: (uint64) mult >>= 51
  4857. # asm 1: shr $51,<mult=int64#4
  4858. # asm 2: shr $51,<mult=%rcx
  4859. shr $51,%rcx
  4860. # qhasm: xp3 &= mulredmask
  4861. # asm 1: and <mulredmask=int64#3,<xp3=int64#7
  4862. # asm 2: and <mulredmask=%rdx,<xp3=%rax
  4863. and %rdx,%rax
  4864. # qhasm: mult *= 19
  4865. # asm 1: imulq $19,<mult=int64#4,>mult=int64#4
  4866. # asm 2: imulq $19,<mult=%rcx,>mult=%rcx
  4867. imulq $19,%rcx,%rcx
  4868. # qhasm: xp0 += mult
  4869. # asm 1: add <mult=int64#4,<xp0=int64#2
  4870. # asm 2: add <mult=%rcx,<xp0=%rsi
  4871. add %rcx,%rsi
  4872. # qhasm: xp4 &= mulredmask
  4873. # asm 1: and <mulredmask=int64#3,<xp4=int64#8
  4874. # asm 2: and <mulredmask=%rdx,<xp4=%r10
  4875. and %rdx,%r10
  4876. # qhasm: *(uint64 *)(workp + 40) = xp0
  4877. # asm 1: movq <xp0=int64#2,40(<workp=int64#1)
  4878. # asm 2: movq <xp0=%rsi,40(<workp=%rdi)
  4879. movq %rsi,40(%rdi)
  4880. # qhasm: *(uint64 *)(workp + 48) = xp1
  4881. # asm 1: movq <xp1=int64#5,48(<workp=int64#1)
  4882. # asm 2: movq <xp1=%r8,48(<workp=%rdi)
  4883. movq %r8,48(%rdi)
  4884. # qhasm: *(uint64 *)(workp + 56) = xp2
  4885. # asm 1: movq <xp2=int64#6,56(<workp=int64#1)
  4886. # asm 2: movq <xp2=%r9,56(<workp=%rdi)
  4887. movq %r9,56(%rdi)
  4888. # qhasm: *(uint64 *)(workp + 64) = xp3
  4889. # asm 1: movq <xp3=int64#7,64(<workp=int64#1)
  4890. # asm 2: movq <xp3=%rax,64(<workp=%rdi)
  4891. movq %rax,64(%rdi)
  4892. # qhasm: *(uint64 *)(workp + 72) = xp4
  4893. # asm 1: movq <xp4=int64#8,72(<workp=int64#1)
  4894. # asm 2: movq <xp4=%r10,72(<workp=%rdi)
  4895. movq %r10,72(%rdi)
  4896. # qhasm: mul121666rax = t50_stack
  4897. # asm 1: movq <t50_stack=stack64#28,>mul121666rax=int64#7
  4898. # asm 2: movq <t50_stack=216(%rsp),>mul121666rax=%rax
  4899. movq 216(%rsp),%rax
  4900. # qhasm: (uint128) mul121666rdx mul121666rax = mul121666rax * *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_121666_213
  4901. mulq crypto_scalarmult_curve25519_amd64_51_121666_213
  4902. # qhasm: (uint64) mul121666rax >>= 13
  4903. # asm 1: shr $13,<mul121666rax=int64#7
  4904. # asm 2: shr $13,<mul121666rax=%rax
  4905. shr $13,%rax
  4906. # qhasm: zp0 = mul121666rax
  4907. # asm 1: mov <mul121666rax=int64#7,>zp0=int64#2
  4908. # asm 2: mov <mul121666rax=%rax,>zp0=%rsi
  4909. mov %rax,%rsi
  4910. # qhasm: zp1 = mul121666rdx
  4911. # asm 1: mov <mul121666rdx=int64#3,>zp1=int64#4
  4912. # asm 2: mov <mul121666rdx=%rdx,>zp1=%rcx
  4913. mov %rdx,%rcx
  4914. # qhasm: mul121666rax = t51_stack
  4915. # asm 1: movq <t51_stack=stack64#29,>mul121666rax=int64#7
  4916. # asm 2: movq <t51_stack=224(%rsp),>mul121666rax=%rax
  4917. movq 224(%rsp),%rax
  4918. # qhasm: (uint128) mul121666rdx mul121666rax = mul121666rax * *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_121666_213
  4919. mulq crypto_scalarmult_curve25519_amd64_51_121666_213
  4920. # qhasm: (uint64) mul121666rax >>= 13
  4921. # asm 1: shr $13,<mul121666rax=int64#7
  4922. # asm 2: shr $13,<mul121666rax=%rax
  4923. shr $13,%rax
  4924. # qhasm: zp1 += mul121666rax
  4925. # asm 1: add <mul121666rax=int64#7,<zp1=int64#4
  4926. # asm 2: add <mul121666rax=%rax,<zp1=%rcx
  4927. add %rax,%rcx
  4928. # qhasm: zp2 = mul121666rdx
  4929. # asm 1: mov <mul121666rdx=int64#3,>zp2=int64#5
  4930. # asm 2: mov <mul121666rdx=%rdx,>zp2=%r8
  4931. mov %rdx,%r8
  4932. # qhasm: mul121666rax = t52_stack
  4933. # asm 1: movq <t52_stack=stack64#30,>mul121666rax=int64#7
  4934. # asm 2: movq <t52_stack=232(%rsp),>mul121666rax=%rax
  4935. movq 232(%rsp),%rax
  4936. # qhasm: (uint128) mul121666rdx mul121666rax = mul121666rax * *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_121666_213
  4937. mulq crypto_scalarmult_curve25519_amd64_51_121666_213
  4938. # qhasm: (uint64) mul121666rax >>= 13
  4939. # asm 1: shr $13,<mul121666rax=int64#7
  4940. # asm 2: shr $13,<mul121666rax=%rax
  4941. shr $13,%rax
  4942. # qhasm: zp2 += mul121666rax
  4943. # asm 1: add <mul121666rax=int64#7,<zp2=int64#5
  4944. # asm 2: add <mul121666rax=%rax,<zp2=%r8
  4945. add %rax,%r8
  4946. # qhasm: zp3 = mul121666rdx
  4947. # asm 1: mov <mul121666rdx=int64#3,>zp3=int64#6
  4948. # asm 2: mov <mul121666rdx=%rdx,>zp3=%r9
  4949. mov %rdx,%r9
  4950. # qhasm: mul121666rax = t53_stack
  4951. # asm 1: movq <t53_stack=stack64#31,>mul121666rax=int64#7
  4952. # asm 2: movq <t53_stack=240(%rsp),>mul121666rax=%rax
  4953. movq 240(%rsp),%rax
  4954. # qhasm: (uint128) mul121666rdx mul121666rax = mul121666rax * *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_121666_213
  4955. mulq crypto_scalarmult_curve25519_amd64_51_121666_213
  4956. # qhasm: (uint64) mul121666rax >>= 13
  4957. # asm 1: shr $13,<mul121666rax=int64#7
  4958. # asm 2: shr $13,<mul121666rax=%rax
  4959. shr $13,%rax
  4960. # qhasm: zp3 += mul121666rax
  4961. # asm 1: add <mul121666rax=int64#7,<zp3=int64#6
  4962. # asm 2: add <mul121666rax=%rax,<zp3=%r9
  4963. add %rax,%r9
  4964. # qhasm: zp4 = mul121666rdx
  4965. # asm 1: mov <mul121666rdx=int64#3,>zp4=int64#8
  4966. # asm 2: mov <mul121666rdx=%rdx,>zp4=%r10
  4967. mov %rdx,%r10
  4968. # qhasm: mul121666rax = t54_stack
  4969. # asm 1: movq <t54_stack=stack64#32,>mul121666rax=int64#7
  4970. # asm 2: movq <t54_stack=248(%rsp),>mul121666rax=%rax
  4971. movq 248(%rsp),%rax
  4972. # qhasm: (uint128) mul121666rdx mul121666rax = mul121666rax * *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_121666_213
  4973. mulq crypto_scalarmult_curve25519_amd64_51_121666_213
  4974. # qhasm: (uint64) mul121666rax >>= 13
  4975. # asm 1: shr $13,<mul121666rax=int64#7
  4976. # asm 2: shr $13,<mul121666rax=%rax
  4977. shr $13,%rax
  4978. # qhasm: zp4 += mul121666rax
  4979. # asm 1: add <mul121666rax=int64#7,<zp4=int64#8
  4980. # asm 2: add <mul121666rax=%rax,<zp4=%r10
  4981. add %rax,%r10
  4982. # qhasm: mul121666rdx *= 19
  4983. # asm 1: imulq $19,<mul121666rdx=int64#3,>mul121666rdx=int64#3
  4984. # asm 2: imulq $19,<mul121666rdx=%rdx,>mul121666rdx=%rdx
  4985. imulq $19,%rdx,%rdx
  4986. # qhasm: zp0 += mul121666rdx
  4987. # asm 1: add <mul121666rdx=int64#3,<zp0=int64#2
  4988. # asm 2: add <mul121666rdx=%rdx,<zp0=%rsi
  4989. add %rdx,%rsi
  4990. # qhasm: zp0 += t70_stack
  4991. # asm 1: addq <t70_stack=stack64#18,<zp0=int64#2
  4992. # asm 2: addq <t70_stack=136(%rsp),<zp0=%rsi
  4993. addq 136(%rsp),%rsi
  4994. # qhasm: zp1 += t71_stack
  4995. # asm 1: addq <t71_stack=stack64#19,<zp1=int64#4
  4996. # asm 2: addq <t71_stack=144(%rsp),<zp1=%rcx
  4997. addq 144(%rsp),%rcx
  4998. # qhasm: zp2 += t72_stack
  4999. # asm 1: addq <t72_stack=stack64#20,<zp2=int64#5
  5000. # asm 2: addq <t72_stack=152(%rsp),<zp2=%r8
  5001. addq 152(%rsp),%r8
  5002. # qhasm: zp3 += t73_stack
  5003. # asm 1: addq <t73_stack=stack64#21,<zp3=int64#6
  5004. # asm 2: addq <t73_stack=160(%rsp),<zp3=%r9
  5005. addq 160(%rsp),%r9
  5006. # qhasm: zp4 += t74_stack
  5007. # asm 1: addq <t74_stack=stack64#22,<zp4=int64#8
  5008. # asm 2: addq <t74_stack=168(%rsp),<zp4=%r10
  5009. addq 168(%rsp),%r10
  5010. # qhasm: *(uint64 *)(workp + 80) = zp0
  5011. # asm 1: movq <zp0=int64#2,80(<workp=int64#1)
  5012. # asm 2: movq <zp0=%rsi,80(<workp=%rdi)
  5013. movq %rsi,80(%rdi)
  5014. # qhasm: *(uint64 *)(workp + 88) = zp1
  5015. # asm 1: movq <zp1=int64#4,88(<workp=int64#1)
  5016. # asm 2: movq <zp1=%rcx,88(<workp=%rdi)
  5017. movq %rcx,88(%rdi)
  5018. # qhasm: *(uint64 *)(workp + 96) = zp2
  5019. # asm 1: movq <zp2=int64#5,96(<workp=int64#1)
  5020. # asm 2: movq <zp2=%r8,96(<workp=%rdi)
  5021. movq %r8,96(%rdi)
  5022. # qhasm: *(uint64 *)(workp + 104) = zp3
  5023. # asm 1: movq <zp3=int64#6,104(<workp=int64#1)
  5024. # asm 2: movq <zp3=%r9,104(<workp=%rdi)
  5025. movq %r9,104(%rdi)
  5026. # qhasm: *(uint64 *)(workp + 112) = zp4
  5027. # asm 1: movq <zp4=int64#8,112(<workp=int64#1)
  5028. # asm 2: movq <zp4=%r10,112(<workp=%rdi)
  5029. movq %r10,112(%rdi)
  5030. # qhasm: mulrax = *(uint64 *)(workp + 104)
  5031. # asm 1: movq 104(<workp=int64#1),>mulrax=int64#2
  5032. # asm 2: movq 104(<workp=%rdi),>mulrax=%rsi
  5033. movq 104(%rdi),%rsi
  5034. # qhasm: mulrax *= 19
  5035. # asm 1: imulq $19,<mulrax=int64#2,>mulrax=int64#7
  5036. # asm 2: imulq $19,<mulrax=%rsi,>mulrax=%rax
  5037. imulq $19,%rsi,%rax
  5038. # qhasm: mulx319_stack = mulrax
  5039. # asm 1: movq <mulrax=int64#7,>mulx319_stack=stack64#8
  5040. # asm 2: movq <mulrax=%rax,>mulx319_stack=56(%rsp)
  5041. movq %rax,56(%rsp)
  5042. # qhasm: (uint128) mulrdx mulrax = mulrax * t52_stack
  5043. # asm 1: mulq <t52_stack=stack64#30
  5044. # asm 2: mulq <t52_stack=232(%rsp)
  5045. mulq 232(%rsp)
  5046. # qhasm: zp0 = mulrax
  5047. # asm 1: mov <mulrax=int64#7,>zp0=int64#2
  5048. # asm 2: mov <mulrax=%rax,>zp0=%rsi
  5049. mov %rax,%rsi
  5050. # qhasm: mulr01 = mulrdx
  5051. # asm 1: mov <mulrdx=int64#3,>mulr01=int64#4
  5052. # asm 2: mov <mulrdx=%rdx,>mulr01=%rcx
  5053. mov %rdx,%rcx
  5054. # qhasm: mulrax = *(uint64 *)(workp + 112)
  5055. # asm 1: movq 112(<workp=int64#1),>mulrax=int64#3
  5056. # asm 2: movq 112(<workp=%rdi),>mulrax=%rdx
  5057. movq 112(%rdi),%rdx
  5058. # qhasm: mulrax *= 19
  5059. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  5060. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  5061. imulq $19,%rdx,%rax
  5062. # qhasm: mulx419_stack = mulrax
  5063. # asm 1: movq <mulrax=int64#7,>mulx419_stack=stack64#9
  5064. # asm 2: movq <mulrax=%rax,>mulx419_stack=64(%rsp)
  5065. movq %rax,64(%rsp)
  5066. # qhasm: (uint128) mulrdx mulrax = mulrax * t51_stack
  5067. # asm 1: mulq <t51_stack=stack64#29
  5068. # asm 2: mulq <t51_stack=224(%rsp)
  5069. mulq 224(%rsp)
  5070. # qhasm: carry? zp0 += mulrax
  5071. # asm 1: add <mulrax=int64#7,<zp0=int64#2
  5072. # asm 2: add <mulrax=%rax,<zp0=%rsi
  5073. add %rax,%rsi
  5074. # qhasm: mulr01 += mulrdx + carry
  5075. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  5076. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  5077. adc %rdx,%rcx
  5078. # qhasm: mulrax = *(uint64 *)(workp + 80)
  5079. # asm 1: movq 80(<workp=int64#1),>mulrax=int64#7
  5080. # asm 2: movq 80(<workp=%rdi),>mulrax=%rax
  5081. movq 80(%rdi),%rax
  5082. # qhasm: (uint128) mulrdx mulrax = mulrax * t50_stack
  5083. # asm 1: mulq <t50_stack=stack64#28
  5084. # asm 2: mulq <t50_stack=216(%rsp)
  5085. mulq 216(%rsp)
  5086. # qhasm: carry? zp0 += mulrax
  5087. # asm 1: add <mulrax=int64#7,<zp0=int64#2
  5088. # asm 2: add <mulrax=%rax,<zp0=%rsi
  5089. add %rax,%rsi
  5090. # qhasm: mulr01 += mulrdx + carry
  5091. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  5092. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  5093. adc %rdx,%rcx
  5094. # qhasm: mulrax = *(uint64 *)(workp + 80)
  5095. # asm 1: movq 80(<workp=int64#1),>mulrax=int64#7
  5096. # asm 2: movq 80(<workp=%rdi),>mulrax=%rax
  5097. movq 80(%rdi),%rax
  5098. # qhasm: (uint128) mulrdx mulrax = mulrax * t51_stack
  5099. # asm 1: mulq <t51_stack=stack64#29
  5100. # asm 2: mulq <t51_stack=224(%rsp)
  5101. mulq 224(%rsp)
  5102. # qhasm: zp1 = mulrax
  5103. # asm 1: mov <mulrax=int64#7,>zp1=int64#5
  5104. # asm 2: mov <mulrax=%rax,>zp1=%r8
  5105. mov %rax,%r8
  5106. # qhasm: mulr11 = mulrdx
  5107. # asm 1: mov <mulrdx=int64#3,>mulr11=int64#6
  5108. # asm 2: mov <mulrdx=%rdx,>mulr11=%r9
  5109. mov %rdx,%r9
  5110. # qhasm: mulrax = *(uint64 *)(workp + 80)
  5111. # asm 1: movq 80(<workp=int64#1),>mulrax=int64#7
  5112. # asm 2: movq 80(<workp=%rdi),>mulrax=%rax
  5113. movq 80(%rdi),%rax
  5114. # qhasm: (uint128) mulrdx mulrax = mulrax * t52_stack
  5115. # asm 1: mulq <t52_stack=stack64#30
  5116. # asm 2: mulq <t52_stack=232(%rsp)
  5117. mulq 232(%rsp)
  5118. # qhasm: zp2 = mulrax
  5119. # asm 1: mov <mulrax=int64#7,>zp2=int64#8
  5120. # asm 2: mov <mulrax=%rax,>zp2=%r10
  5121. mov %rax,%r10
  5122. # qhasm: mulr21 = mulrdx
  5123. # asm 1: mov <mulrdx=int64#3,>mulr21=int64#9
  5124. # asm 2: mov <mulrdx=%rdx,>mulr21=%r11
  5125. mov %rdx,%r11
  5126. # qhasm: mulrax = *(uint64 *)(workp + 80)
  5127. # asm 1: movq 80(<workp=int64#1),>mulrax=int64#7
  5128. # asm 2: movq 80(<workp=%rdi),>mulrax=%rax
  5129. movq 80(%rdi),%rax
  5130. # qhasm: (uint128) mulrdx mulrax = mulrax * t53_stack
  5131. # asm 1: mulq <t53_stack=stack64#31
  5132. # asm 2: mulq <t53_stack=240(%rsp)
  5133. mulq 240(%rsp)
  5134. # qhasm: zp3 = mulrax
  5135. # asm 1: mov <mulrax=int64#7,>zp3=int64#10
  5136. # asm 2: mov <mulrax=%rax,>zp3=%r12
  5137. mov %rax,%r12
  5138. # qhasm: mulr31 = mulrdx
  5139. # asm 1: mov <mulrdx=int64#3,>mulr31=int64#11
  5140. # asm 2: mov <mulrdx=%rdx,>mulr31=%r13
  5141. mov %rdx,%r13
  5142. # qhasm: mulrax = *(uint64 *)(workp + 80)
  5143. # asm 1: movq 80(<workp=int64#1),>mulrax=int64#7
  5144. # asm 2: movq 80(<workp=%rdi),>mulrax=%rax
  5145. movq 80(%rdi),%rax
  5146. # qhasm: (uint128) mulrdx mulrax = mulrax * t54_stack
  5147. # asm 1: mulq <t54_stack=stack64#32
  5148. # asm 2: mulq <t54_stack=248(%rsp)
  5149. mulq 248(%rsp)
  5150. # qhasm: zp4 = mulrax
  5151. # asm 1: mov <mulrax=int64#7,>zp4=int64#12
  5152. # asm 2: mov <mulrax=%rax,>zp4=%r14
  5153. mov %rax,%r14
  5154. # qhasm: mulr41 = mulrdx
  5155. # asm 1: mov <mulrdx=int64#3,>mulr41=int64#13
  5156. # asm 2: mov <mulrdx=%rdx,>mulr41=%r15
  5157. mov %rdx,%r15
  5158. # qhasm: mulrax = *(uint64 *)(workp + 88)
  5159. # asm 1: movq 88(<workp=int64#1),>mulrax=int64#7
  5160. # asm 2: movq 88(<workp=%rdi),>mulrax=%rax
  5161. movq 88(%rdi),%rax
  5162. # qhasm: (uint128) mulrdx mulrax = mulrax * t50_stack
  5163. # asm 1: mulq <t50_stack=stack64#28
  5164. # asm 2: mulq <t50_stack=216(%rsp)
  5165. mulq 216(%rsp)
  5166. # qhasm: carry? zp1 += mulrax
  5167. # asm 1: add <mulrax=int64#7,<zp1=int64#5
  5168. # asm 2: add <mulrax=%rax,<zp1=%r8
  5169. add %rax,%r8
  5170. # qhasm: mulr11 += mulrdx + carry
  5171. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  5172. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  5173. adc %rdx,%r9
  5174. # qhasm: mulrax = *(uint64 *)(workp + 88)
  5175. # asm 1: movq 88(<workp=int64#1),>mulrax=int64#7
  5176. # asm 2: movq 88(<workp=%rdi),>mulrax=%rax
  5177. movq 88(%rdi),%rax
  5178. # qhasm: (uint128) mulrdx mulrax = mulrax * t51_stack
  5179. # asm 1: mulq <t51_stack=stack64#29
  5180. # asm 2: mulq <t51_stack=224(%rsp)
  5181. mulq 224(%rsp)
  5182. # qhasm: carry? zp2 += mulrax
  5183. # asm 1: add <mulrax=int64#7,<zp2=int64#8
  5184. # asm 2: add <mulrax=%rax,<zp2=%r10
  5185. add %rax,%r10
  5186. # qhasm: mulr21 += mulrdx + carry
  5187. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  5188. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  5189. adc %rdx,%r11
  5190. # qhasm: mulrax = *(uint64 *)(workp + 88)
  5191. # asm 1: movq 88(<workp=int64#1),>mulrax=int64#7
  5192. # asm 2: movq 88(<workp=%rdi),>mulrax=%rax
  5193. movq 88(%rdi),%rax
  5194. # qhasm: (uint128) mulrdx mulrax = mulrax * t52_stack
  5195. # asm 1: mulq <t52_stack=stack64#30
  5196. # asm 2: mulq <t52_stack=232(%rsp)
  5197. mulq 232(%rsp)
  5198. # qhasm: carry? zp3 += mulrax
  5199. # asm 1: add <mulrax=int64#7,<zp3=int64#10
  5200. # asm 2: add <mulrax=%rax,<zp3=%r12
  5201. add %rax,%r12
  5202. # qhasm: mulr31 += mulrdx + carry
  5203. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  5204. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  5205. adc %rdx,%r13
  5206. # qhasm: mulrax = *(uint64 *)(workp + 88)
  5207. # asm 1: movq 88(<workp=int64#1),>mulrax=int64#7
  5208. # asm 2: movq 88(<workp=%rdi),>mulrax=%rax
  5209. movq 88(%rdi),%rax
  5210. # qhasm: (uint128) mulrdx mulrax = mulrax * t53_stack
  5211. # asm 1: mulq <t53_stack=stack64#31
  5212. # asm 2: mulq <t53_stack=240(%rsp)
  5213. mulq 240(%rsp)
  5214. # qhasm: carry? zp4 += mulrax
  5215. # asm 1: add <mulrax=int64#7,<zp4=int64#12
  5216. # asm 2: add <mulrax=%rax,<zp4=%r14
  5217. add %rax,%r14
  5218. # qhasm: mulr41 += mulrdx + carry
  5219. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  5220. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  5221. adc %rdx,%r15
  5222. # qhasm: mulrax = *(uint64 *)(workp + 88)
  5223. # asm 1: movq 88(<workp=int64#1),>mulrax=int64#3
  5224. # asm 2: movq 88(<workp=%rdi),>mulrax=%rdx
  5225. movq 88(%rdi),%rdx
  5226. # qhasm: mulrax *= 19
  5227. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  5228. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  5229. imulq $19,%rdx,%rax
  5230. # qhasm: (uint128) mulrdx mulrax = mulrax * t54_stack
  5231. # asm 1: mulq <t54_stack=stack64#32
  5232. # asm 2: mulq <t54_stack=248(%rsp)
  5233. mulq 248(%rsp)
  5234. # qhasm: carry? zp0 += mulrax
  5235. # asm 1: add <mulrax=int64#7,<zp0=int64#2
  5236. # asm 2: add <mulrax=%rax,<zp0=%rsi
  5237. add %rax,%rsi
  5238. # qhasm: mulr01 += mulrdx + carry
  5239. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  5240. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  5241. adc %rdx,%rcx
  5242. # qhasm: mulrax = *(uint64 *)(workp + 96)
  5243. # asm 1: movq 96(<workp=int64#1),>mulrax=int64#7
  5244. # asm 2: movq 96(<workp=%rdi),>mulrax=%rax
  5245. movq 96(%rdi),%rax
  5246. # qhasm: (uint128) mulrdx mulrax = mulrax * t50_stack
  5247. # asm 1: mulq <t50_stack=stack64#28
  5248. # asm 2: mulq <t50_stack=216(%rsp)
  5249. mulq 216(%rsp)
  5250. # qhasm: carry? zp2 += mulrax
  5251. # asm 1: add <mulrax=int64#7,<zp2=int64#8
  5252. # asm 2: add <mulrax=%rax,<zp2=%r10
  5253. add %rax,%r10
  5254. # qhasm: mulr21 += mulrdx + carry
  5255. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  5256. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  5257. adc %rdx,%r11
  5258. # qhasm: mulrax = *(uint64 *)(workp + 96)
  5259. # asm 1: movq 96(<workp=int64#1),>mulrax=int64#7
  5260. # asm 2: movq 96(<workp=%rdi),>mulrax=%rax
  5261. movq 96(%rdi),%rax
  5262. # qhasm: (uint128) mulrdx mulrax = mulrax * t51_stack
  5263. # asm 1: mulq <t51_stack=stack64#29
  5264. # asm 2: mulq <t51_stack=224(%rsp)
  5265. mulq 224(%rsp)
  5266. # qhasm: carry? zp3 += mulrax
  5267. # asm 1: add <mulrax=int64#7,<zp3=int64#10
  5268. # asm 2: add <mulrax=%rax,<zp3=%r12
  5269. add %rax,%r12
  5270. # qhasm: mulr31 += mulrdx + carry
  5271. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  5272. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  5273. adc %rdx,%r13
  5274. # qhasm: mulrax = *(uint64 *)(workp + 96)
  5275. # asm 1: movq 96(<workp=int64#1),>mulrax=int64#7
  5276. # asm 2: movq 96(<workp=%rdi),>mulrax=%rax
  5277. movq 96(%rdi),%rax
  5278. # qhasm: (uint128) mulrdx mulrax = mulrax * t52_stack
  5279. # asm 1: mulq <t52_stack=stack64#30
  5280. # asm 2: mulq <t52_stack=232(%rsp)
  5281. mulq 232(%rsp)
  5282. # qhasm: carry? zp4 += mulrax
  5283. # asm 1: add <mulrax=int64#7,<zp4=int64#12
  5284. # asm 2: add <mulrax=%rax,<zp4=%r14
  5285. add %rax,%r14
  5286. # qhasm: mulr41 += mulrdx + carry
  5287. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  5288. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  5289. adc %rdx,%r15
  5290. # qhasm: mulrax = *(uint64 *)(workp + 96)
  5291. # asm 1: movq 96(<workp=int64#1),>mulrax=int64#3
  5292. # asm 2: movq 96(<workp=%rdi),>mulrax=%rdx
  5293. movq 96(%rdi),%rdx
  5294. # qhasm: mulrax *= 19
  5295. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  5296. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  5297. imulq $19,%rdx,%rax
  5298. # qhasm: (uint128) mulrdx mulrax = mulrax * t53_stack
  5299. # asm 1: mulq <t53_stack=stack64#31
  5300. # asm 2: mulq <t53_stack=240(%rsp)
  5301. mulq 240(%rsp)
  5302. # qhasm: carry? zp0 += mulrax
  5303. # asm 1: add <mulrax=int64#7,<zp0=int64#2
  5304. # asm 2: add <mulrax=%rax,<zp0=%rsi
  5305. add %rax,%rsi
  5306. # qhasm: mulr01 += mulrdx + carry
  5307. # asm 1: adc <mulrdx=int64#3,<mulr01=int64#4
  5308. # asm 2: adc <mulrdx=%rdx,<mulr01=%rcx
  5309. adc %rdx,%rcx
  5310. # qhasm: mulrax = *(uint64 *)(workp + 96)
  5311. # asm 1: movq 96(<workp=int64#1),>mulrax=int64#3
  5312. # asm 2: movq 96(<workp=%rdi),>mulrax=%rdx
  5313. movq 96(%rdi),%rdx
  5314. # qhasm: mulrax *= 19
  5315. # asm 1: imulq $19,<mulrax=int64#3,>mulrax=int64#7
  5316. # asm 2: imulq $19,<mulrax=%rdx,>mulrax=%rax
  5317. imulq $19,%rdx,%rax
  5318. # qhasm: (uint128) mulrdx mulrax = mulrax * t54_stack
  5319. # asm 1: mulq <t54_stack=stack64#32
  5320. # asm 2: mulq <t54_stack=248(%rsp)
  5321. mulq 248(%rsp)
  5322. # qhasm: carry? zp1 += mulrax
  5323. # asm 1: add <mulrax=int64#7,<zp1=int64#5
  5324. # asm 2: add <mulrax=%rax,<zp1=%r8
  5325. add %rax,%r8
  5326. # qhasm: mulr11 += mulrdx + carry
  5327. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  5328. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  5329. adc %rdx,%r9
  5330. # qhasm: mulrax = *(uint64 *)(workp + 104)
  5331. # asm 1: movq 104(<workp=int64#1),>mulrax=int64#7
  5332. # asm 2: movq 104(<workp=%rdi),>mulrax=%rax
  5333. movq 104(%rdi),%rax
  5334. # qhasm: (uint128) mulrdx mulrax = mulrax * t50_stack
  5335. # asm 1: mulq <t50_stack=stack64#28
  5336. # asm 2: mulq <t50_stack=216(%rsp)
  5337. mulq 216(%rsp)
  5338. # qhasm: carry? zp3 += mulrax
  5339. # asm 1: add <mulrax=int64#7,<zp3=int64#10
  5340. # asm 2: add <mulrax=%rax,<zp3=%r12
  5341. add %rax,%r12
  5342. # qhasm: mulr31 += mulrdx + carry
  5343. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  5344. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  5345. adc %rdx,%r13
  5346. # qhasm: mulrax = *(uint64 *)(workp + 104)
  5347. # asm 1: movq 104(<workp=int64#1),>mulrax=int64#7
  5348. # asm 2: movq 104(<workp=%rdi),>mulrax=%rax
  5349. movq 104(%rdi),%rax
  5350. # qhasm: (uint128) mulrdx mulrax = mulrax * t51_stack
  5351. # asm 1: mulq <t51_stack=stack64#29
  5352. # asm 2: mulq <t51_stack=224(%rsp)
  5353. mulq 224(%rsp)
  5354. # qhasm: carry? zp4 += mulrax
  5355. # asm 1: add <mulrax=int64#7,<zp4=int64#12
  5356. # asm 2: add <mulrax=%rax,<zp4=%r14
  5357. add %rax,%r14
  5358. # qhasm: mulr41 += mulrdx + carry
  5359. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  5360. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  5361. adc %rdx,%r15
  5362. # qhasm: mulrax = mulx319_stack
  5363. # asm 1: movq <mulx319_stack=stack64#8,>mulrax=int64#7
  5364. # asm 2: movq <mulx319_stack=56(%rsp),>mulrax=%rax
  5365. movq 56(%rsp),%rax
  5366. # qhasm: (uint128) mulrdx mulrax = mulrax * t53_stack
  5367. # asm 1: mulq <t53_stack=stack64#31
  5368. # asm 2: mulq <t53_stack=240(%rsp)
  5369. mulq 240(%rsp)
  5370. # qhasm: carry? zp1 += mulrax
  5371. # asm 1: add <mulrax=int64#7,<zp1=int64#5
  5372. # asm 2: add <mulrax=%rax,<zp1=%r8
  5373. add %rax,%r8
  5374. # qhasm: mulr11 += mulrdx + carry
  5375. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  5376. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  5377. adc %rdx,%r9
  5378. # qhasm: mulrax = mulx319_stack
  5379. # asm 1: movq <mulx319_stack=stack64#8,>mulrax=int64#7
  5380. # asm 2: movq <mulx319_stack=56(%rsp),>mulrax=%rax
  5381. movq 56(%rsp),%rax
  5382. # qhasm: (uint128) mulrdx mulrax = mulrax * t54_stack
  5383. # asm 1: mulq <t54_stack=stack64#32
  5384. # asm 2: mulq <t54_stack=248(%rsp)
  5385. mulq 248(%rsp)
  5386. # qhasm: carry? zp2 += mulrax
  5387. # asm 1: add <mulrax=int64#7,<zp2=int64#8
  5388. # asm 2: add <mulrax=%rax,<zp2=%r10
  5389. add %rax,%r10
  5390. # qhasm: mulr21 += mulrdx + carry
  5391. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  5392. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  5393. adc %rdx,%r11
  5394. # qhasm: mulrax = *(uint64 *)(workp + 112)
  5395. # asm 1: movq 112(<workp=int64#1),>mulrax=int64#7
  5396. # asm 2: movq 112(<workp=%rdi),>mulrax=%rax
  5397. movq 112(%rdi),%rax
  5398. # qhasm: (uint128) mulrdx mulrax = mulrax * t50_stack
  5399. # asm 1: mulq <t50_stack=stack64#28
  5400. # asm 2: mulq <t50_stack=216(%rsp)
  5401. mulq 216(%rsp)
  5402. # qhasm: carry? zp4 += mulrax
  5403. # asm 1: add <mulrax=int64#7,<zp4=int64#12
  5404. # asm 2: add <mulrax=%rax,<zp4=%r14
  5405. add %rax,%r14
  5406. # qhasm: mulr41 += mulrdx + carry
  5407. # asm 1: adc <mulrdx=int64#3,<mulr41=int64#13
  5408. # asm 2: adc <mulrdx=%rdx,<mulr41=%r15
  5409. adc %rdx,%r15
  5410. # qhasm: mulrax = mulx419_stack
  5411. # asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
  5412. # asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
  5413. movq 64(%rsp),%rax
  5414. # qhasm: (uint128) mulrdx mulrax = mulrax * t52_stack
  5415. # asm 1: mulq <t52_stack=stack64#30
  5416. # asm 2: mulq <t52_stack=232(%rsp)
  5417. mulq 232(%rsp)
  5418. # qhasm: carry? zp1 += mulrax
  5419. # asm 1: add <mulrax=int64#7,<zp1=int64#5
  5420. # asm 2: add <mulrax=%rax,<zp1=%r8
  5421. add %rax,%r8
  5422. # qhasm: mulr11 += mulrdx + carry
  5423. # asm 1: adc <mulrdx=int64#3,<mulr11=int64#6
  5424. # asm 2: adc <mulrdx=%rdx,<mulr11=%r9
  5425. adc %rdx,%r9
  5426. # qhasm: mulrax = mulx419_stack
  5427. # asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
  5428. # asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
  5429. movq 64(%rsp),%rax
  5430. # qhasm: (uint128) mulrdx mulrax = mulrax * t53_stack
  5431. # asm 1: mulq <t53_stack=stack64#31
  5432. # asm 2: mulq <t53_stack=240(%rsp)
  5433. mulq 240(%rsp)
  5434. # qhasm: carry? zp2 += mulrax
  5435. # asm 1: add <mulrax=int64#7,<zp2=int64#8
  5436. # asm 2: add <mulrax=%rax,<zp2=%r10
  5437. add %rax,%r10
  5438. # qhasm: mulr21 += mulrdx + carry
  5439. # asm 1: adc <mulrdx=int64#3,<mulr21=int64#9
  5440. # asm 2: adc <mulrdx=%rdx,<mulr21=%r11
  5441. adc %rdx,%r11
  5442. # qhasm: mulrax = mulx419_stack
  5443. # asm 1: movq <mulx419_stack=stack64#9,>mulrax=int64#7
  5444. # asm 2: movq <mulx419_stack=64(%rsp),>mulrax=%rax
  5445. movq 64(%rsp),%rax
  5446. # qhasm: (uint128) mulrdx mulrax = mulrax * t54_stack
  5447. # asm 1: mulq <t54_stack=stack64#32
  5448. # asm 2: mulq <t54_stack=248(%rsp)
  5449. mulq 248(%rsp)
  5450. # qhasm: carry? zp3 += mulrax
  5451. # asm 1: add <mulrax=int64#7,<zp3=int64#10
  5452. # asm 2: add <mulrax=%rax,<zp3=%r12
  5453. add %rax,%r12
  5454. # qhasm: mulr31 += mulrdx + carry
  5455. # asm 1: adc <mulrdx=int64#3,<mulr31=int64#11
  5456. # asm 2: adc <mulrdx=%rdx,<mulr31=%r13
  5457. adc %rdx,%r13
  5458. # qhasm: mulredmask = *(uint64 *) &crypto_scalarmult_curve25519_amd64_51_REDMASK51
  5459. # asm 1: movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,>mulredmask=int64#3
  5460. # asm 2: movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,>mulredmask=%rdx
  5461. movq crypto_scalarmult_curve25519_amd64_51_REDMASK51,%rdx
  5462. # qhasm: mulr01 = (mulr01.zp0) << 13
  5463. # asm 1: shld $13,<zp0=int64#2,<mulr01=int64#4
  5464. # asm 2: shld $13,<zp0=%rsi,<mulr01=%rcx
  5465. shld $13,%rsi,%rcx
  5466. # qhasm: zp0 &= mulredmask
  5467. # asm 1: and <mulredmask=int64#3,<zp0=int64#2
  5468. # asm 2: and <mulredmask=%rdx,<zp0=%rsi
  5469. and %rdx,%rsi
  5470. # qhasm: mulr11 = (mulr11.zp1) << 13
  5471. # asm 1: shld $13,<zp1=int64#5,<mulr11=int64#6
  5472. # asm 2: shld $13,<zp1=%r8,<mulr11=%r9
  5473. shld $13,%r8,%r9
  5474. # qhasm: zp1 &= mulredmask
  5475. # asm 1: and <mulredmask=int64#3,<zp1=int64#5
  5476. # asm 2: and <mulredmask=%rdx,<zp1=%r8
  5477. and %rdx,%r8
  5478. # qhasm: zp1 += mulr01
  5479. # asm 1: add <mulr01=int64#4,<zp1=int64#5
  5480. # asm 2: add <mulr01=%rcx,<zp1=%r8
  5481. add %rcx,%r8
  5482. # qhasm: mulr21 = (mulr21.zp2) << 13
  5483. # asm 1: shld $13,<zp2=int64#8,<mulr21=int64#9
  5484. # asm 2: shld $13,<zp2=%r10,<mulr21=%r11
  5485. shld $13,%r10,%r11
  5486. # qhasm: zp2 &= mulredmask
  5487. # asm 1: and <mulredmask=int64#3,<zp2=int64#8
  5488. # asm 2: and <mulredmask=%rdx,<zp2=%r10
  5489. and %rdx,%r10
  5490. # qhasm: zp2 += mulr11
  5491. # asm 1: add <mulr11=int64#6,<zp2=int64#8
  5492. # asm 2: add <mulr11=%r9,<zp2=%r10
  5493. add %r9,%r10
  5494. # qhasm: mulr31 = (mulr31.zp3) << 13
  5495. # asm 1: shld $13,<zp3=int64#10,<mulr31=int64#11
  5496. # asm 2: shld $13,<zp3=%r12,<mulr31=%r13
  5497. shld $13,%r12,%r13
  5498. # qhasm: zp3 &= mulredmask
  5499. # asm 1: and <mulredmask=int64#3,<zp3=int64#10
  5500. # asm 2: and <mulredmask=%rdx,<zp3=%r12
  5501. and %rdx,%r12
  5502. # qhasm: zp3 += mulr21
  5503. # asm 1: add <mulr21=int64#9,<zp3=int64#10
  5504. # asm 2: add <mulr21=%r11,<zp3=%r12
  5505. add %r11,%r12
  5506. # qhasm: mulr41 = (mulr41.zp4) << 13
  5507. # asm 1: shld $13,<zp4=int64#12,<mulr41=int64#13
  5508. # asm 2: shld $13,<zp4=%r14,<mulr41=%r15
  5509. shld $13,%r14,%r15
  5510. # qhasm: zp4 &= mulredmask
  5511. # asm 1: and <mulredmask=int64#3,<zp4=int64#12
  5512. # asm 2: and <mulredmask=%rdx,<zp4=%r14
  5513. and %rdx,%r14
  5514. # qhasm: zp4 += mulr31
  5515. # asm 1: add <mulr31=int64#11,<zp4=int64#12
  5516. # asm 2: add <mulr31=%r13,<zp4=%r14
  5517. add %r13,%r14
  5518. # qhasm: mulr41 = mulr41 * 19
  5519. # asm 1: imulq $19,<mulr41=int64#13,>mulr41=int64#4
  5520. # asm 2: imulq $19,<mulr41=%r15,>mulr41=%rcx
  5521. imulq $19,%r15,%rcx
  5522. # qhasm: zp0 += mulr41
  5523. # asm 1: add <mulr41=int64#4,<zp0=int64#2
  5524. # asm 2: add <mulr41=%rcx,<zp0=%rsi
  5525. add %rcx,%rsi
  5526. # qhasm: mult = zp0
  5527. # asm 1: mov <zp0=int64#2,>mult=int64#4
  5528. # asm 2: mov <zp0=%rsi,>mult=%rcx
  5529. mov %rsi,%rcx
  5530. # qhasm: (uint64) mult >>= 51
  5531. # asm 1: shr $51,<mult=int64#4
  5532. # asm 2: shr $51,<mult=%rcx
  5533. shr $51,%rcx
  5534. # qhasm: mult += zp1
  5535. # asm 1: add <zp1=int64#5,<mult=int64#4
  5536. # asm 2: add <zp1=%r8,<mult=%rcx
  5537. add %r8,%rcx
  5538. # qhasm: zp1 = mult
  5539. # asm 1: mov <mult=int64#4,>zp1=int64#5
  5540. # asm 2: mov <mult=%rcx,>zp1=%r8
  5541. mov %rcx,%r8
  5542. # qhasm: (uint64) mult >>= 51
  5543. # asm 1: shr $51,<mult=int64#4
  5544. # asm 2: shr $51,<mult=%rcx
  5545. shr $51,%rcx
  5546. # qhasm: zp0 &= mulredmask
  5547. # asm 1: and <mulredmask=int64#3,<zp0=int64#2
  5548. # asm 2: and <mulredmask=%rdx,<zp0=%rsi
  5549. and %rdx,%rsi
  5550. # qhasm: mult += zp2
  5551. # asm 1: add <zp2=int64#8,<mult=int64#4
  5552. # asm 2: add <zp2=%r10,<mult=%rcx
  5553. add %r10,%rcx
  5554. # qhasm: zp2 = mult
  5555. # asm 1: mov <mult=int64#4,>zp2=int64#6
  5556. # asm 2: mov <mult=%rcx,>zp2=%r9
  5557. mov %rcx,%r9
  5558. # qhasm: (uint64) mult >>= 51
  5559. # asm 1: shr $51,<mult=int64#4
  5560. # asm 2: shr $51,<mult=%rcx
  5561. shr $51,%rcx
  5562. # qhasm: zp1 &= mulredmask
  5563. # asm 1: and <mulredmask=int64#3,<zp1=int64#5
  5564. # asm 2: and <mulredmask=%rdx,<zp1=%r8
  5565. and %rdx,%r8
  5566. # qhasm: mult += zp3
  5567. # asm 1: add <zp3=int64#10,<mult=int64#4
  5568. # asm 2: add <zp3=%r12,<mult=%rcx
  5569. add %r12,%rcx
  5570. # qhasm: zp3 = mult
  5571. # asm 1: mov <mult=int64#4,>zp3=int64#7
  5572. # asm 2: mov <mult=%rcx,>zp3=%rax
  5573. mov %rcx,%rax
  5574. # qhasm: (uint64) mult >>= 51
  5575. # asm 1: shr $51,<mult=int64#4
  5576. # asm 2: shr $51,<mult=%rcx
  5577. shr $51,%rcx
  5578. # qhasm: zp2 &= mulredmask
  5579. # asm 1: and <mulredmask=int64#3,<zp2=int64#6
  5580. # asm 2: and <mulredmask=%rdx,<zp2=%r9
  5581. and %rdx,%r9
  5582. # qhasm: mult += zp4
  5583. # asm 1: add <zp4=int64#12,<mult=int64#4
  5584. # asm 2: add <zp4=%r14,<mult=%rcx
  5585. add %r14,%rcx
  5586. # qhasm: zp4 = mult
  5587. # asm 1: mov <mult=int64#4,>zp4=int64#8
  5588. # asm 2: mov <mult=%rcx,>zp4=%r10
  5589. mov %rcx,%r10
  5590. # qhasm: (uint64) mult >>= 51
  5591. # asm 1: shr $51,<mult=int64#4
  5592. # asm 2: shr $51,<mult=%rcx
  5593. shr $51,%rcx
  5594. # qhasm: zp3 &= mulredmask
  5595. # asm 1: and <mulredmask=int64#3,<zp3=int64#7
  5596. # asm 2: and <mulredmask=%rdx,<zp3=%rax
  5597. and %rdx,%rax
  5598. # qhasm: mult *= 19
  5599. # asm 1: imulq $19,<mult=int64#4,>mult=int64#4
  5600. # asm 2: imulq $19,<mult=%rcx,>mult=%rcx
  5601. imulq $19,%rcx,%rcx
  5602. # qhasm: zp0 += mult
  5603. # asm 1: add <mult=int64#4,<zp0=int64#2
  5604. # asm 2: add <mult=%rcx,<zp0=%rsi
  5605. add %rcx,%rsi
  5606. # qhasm: zp4 &= mulredmask
  5607. # asm 1: and <mulredmask=int64#3,<zp4=int64#8
  5608. # asm 2: and <mulredmask=%rdx,<zp4=%r10
  5609. and %rdx,%r10
  5610. # qhasm: *(uint64 *)(workp + 80) = zp0
  5611. # asm 1: movq <zp0=int64#2,80(<workp=int64#1)
  5612. # asm 2: movq <zp0=%rsi,80(<workp=%rdi)
  5613. movq %rsi,80(%rdi)
  5614. # qhasm: *(uint64 *)(workp + 88) = zp1
  5615. # asm 1: movq <zp1=int64#5,88(<workp=int64#1)
  5616. # asm 2: movq <zp1=%r8,88(<workp=%rdi)
  5617. movq %r8,88(%rdi)
  5618. # qhasm: *(uint64 *)(workp + 96) = zp2
  5619. # asm 1: movq <zp2=int64#6,96(<workp=int64#1)
  5620. # asm 2: movq <zp2=%r9,96(<workp=%rdi)
  5621. movq %r9,96(%rdi)
  5622. # qhasm: *(uint64 *)(workp + 104) = zp3
  5623. # asm 1: movq <zp3=int64#7,104(<workp=int64#1)
  5624. # asm 2: movq <zp3=%rax,104(<workp=%rdi)
  5625. movq %rax,104(%rdi)
  5626. # qhasm: *(uint64 *)(workp + 112) = zp4
  5627. # asm 1: movq <zp4=int64#8,112(<workp=int64#1)
  5628. # asm 2: movq <zp4=%r10,112(<workp=%rdi)
  5629. movq %r10,112(%rdi)
  5630. # qhasm: caller1 = caller1_stack
  5631. # asm 1: movq <caller1_stack=stack64#1,>caller1=int64#9
  5632. # asm 2: movq <caller1_stack=0(%rsp),>caller1=%r11
  5633. movq 0(%rsp),%r11
  5634. # qhasm: caller2 = caller2_stack
  5635. # asm 1: movq <caller2_stack=stack64#2,>caller2=int64#10
  5636. # asm 2: movq <caller2_stack=8(%rsp),>caller2=%r12
  5637. movq 8(%rsp),%r12
  5638. # qhasm: caller3 = caller3_stack
  5639. # asm 1: movq <caller3_stack=stack64#3,>caller3=int64#11
  5640. # asm 2: movq <caller3_stack=16(%rsp),>caller3=%r13
  5641. movq 16(%rsp),%r13
  5642. # qhasm: caller4 = caller4_stack
  5643. # asm 1: movq <caller4_stack=stack64#4,>caller4=int64#12
  5644. # asm 2: movq <caller4_stack=24(%rsp),>caller4=%r14
  5645. movq 24(%rsp),%r14
  5646. # qhasm: caller5 = caller5_stack
  5647. # asm 1: movq <caller5_stack=stack64#5,>caller5=int64#13
  5648. # asm 2: movq <caller5_stack=32(%rsp),>caller5=%r15
  5649. movq 32(%rsp),%r15
  5650. # qhasm: caller6 = caller6_stack
  5651. # asm 1: movq <caller6_stack=stack64#6,>caller6=int64#14
  5652. # asm 2: movq <caller6_stack=40(%rsp),>caller6=%rbx
  5653. movq 40(%rsp),%rbx
  5654. # qhasm: caller7 = caller7_stack
  5655. # asm 1: movq <caller7_stack=stack64#7,>caller7=int64#15
  5656. # asm 2: movq <caller7_stack=48(%rsp),>caller7=%rbp
  5657. movq 48(%rsp),%rbp
  5658. # qhasm: leave
  5659. add %r11,%rsp
  5660. mov %rdi,%rax
  5661. mov %rsi,%rdx
  5662. ret