12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589115901159111592115931159411595115961159711598115991160011601116021160311604116051160611607116081160911610116111161211613116141161511616116171161811619116201162111622116231162411625116261162711628116291163011631116321163311634116351163611637116381163911640116411164211643116441164511646116471164811649116501165111652116531165411655116561165711658116591166011661116621166311664116651166611667116681166911670116711167211673116741167511676116771167811679116801168111682116831168411685116861168711688116891169011691116921169311694116951169611697116981169911700117011170211703117041170511706117071170811709117101171111712117131171411715117161171711718117191172011721117221172311724117251172611727117281172911730117311173211733117341173511736117371173811739117401174111742117431174411745117461174711748117491175011751117521175311754117551175611757117581175911760117611176211763117641176511766117671176811769117701177111772117731177411775117761177711778117791178011781117821178311784117851178611787117881178911790117911179211793117941179511796117971179811799118001180111802118031180411805118061180711808118091181011811118121181311814118151181611817118181181911820118211
18221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211
22221222312224122251222612227122281222912230122311223212233122341223512236122371223812239122401224112242122431224412245122461224712248122491225012251122521225312254122551225612257122581225912260122611226212263122641226512266122671226812269122701227112272122731227412275122761227712278122791228012281122821228312284122851228612287122881228912290122911229212293122941229512296122971229812299123001230112302123031230412305123061230712308123091231012311123121231312314123151231612317123181231912320123211232212323123241232512326123271232812329123301233112332123331233412335123361233712338123391234012341123421234312344123451234612347123481234912350123511235212353123541235512356123571235812359123601236112362123631236412365123661236712368123691237012371123721237312374123751237612377123781237912380123811238212383123841238512386123871238812389123901239112392123931239412395123961239712398123991240012401124021240312404124051240612407124081240912410124111241212413124141241512416124171241812419124201242112422124231242412425124261242712428124291243012431124321243312434124351243612437124381243912440124411244212443124441244512446124471244812449124501245112452124531245412455124561245712458124591246012461124621246312464124651246612467124681246912470124711247212473124741247512476124771247812479124801248112482124831248412485124861248712488124891249012491124921249312494124951249612497124981249912500125011250212503125041250512506125071250812509125101251112512125131251412515125161251712518125191252012521125221252312524125251252612527125281252912530125311253212533125341253512536125371253812539125401254112542125431254412545125461254712548125491255012551125521255312554125551255612557125581255912560125611256212563125641256512566125671256812569125701257112572125731257412575125761257712578125791258012581125821258312584125851258612587125881258912590125911259212593125941259512596125971259812599126001260112602126031260412605126061260712608126091261012611126121261312614126151261612617126181261912620126211
26221262312624126251262612627126281262912630126311263212633126341263512636126371263812639126401264112642126431264412645126461264712648126491265012651126521265312654126551265612657126581265912660126611266212663126641266512666126671266812669126701267112672126731267412675126761267712678126791268012681126821268312684126851268612687126881268912690126911269212693126941269512696126971269812699127001270112702127031270412705127061270712708127091271012711127121271312714127151271612717127181271912720127211272212723127241272512726127271272812729127301273112732127331273412735127361273712738127391274012741127421274312744127451274612747127481274912750127511275212753127541275512756127571275812759127601276112762127631276412765127661276712768127691277012771127721277312774127751277612777127781277912780127811278212783127841278512786127871278812789127901279112792127931279412795127961279712798127991280012801128021280312804128051280612807128081280912810128111281212813128141281512816128171281812819128201282112822128231282412825128261282712828128291283012831128321283312834128351283612837128381283912840128411284212843128441284512846128471284812849128501285112852128531285412855128561285712858128591286012861128621286312864128651286612867128681286912870128711287212873128741287512876128771287812879128801288112882128831288412885128861288712888128891289012891128921289312894128951289612897128981289912900129011290212903129041290512906129071290812909129101291112912129131291412915129161291712918129191292012921129221292312924129251292612927129281292912930129311293212933129341293512936129371293812939129401294112942129431294412945129461294712948129491295012951129521295312954129551295612957129581295912960129611296212963129641296512966129671296812969129701297112972129731297412975129761297712978129791298012981129821298312984129851298612987129881298912990129911299212993129941299512996129971299812999130001300113002130031300413005130061300713008130091301013011130121301313014130151301613017130181301913020130211
30221302313024130251302613027130281302913030130311303213033130341303513036130371303813039130401304113042130431304413045130461304713048130491305013051130521305313054130551305613057130581305913060130611306213063130641306513066130671306813069130701307113072130731307413075130761307713078130791308013081130821308313084130851308613087130881308913090130911309213093130941309513096130971309813099131001310113102131031310413105131061310713108131091311013111131121311313114131151311613117131181311913120131211312213123131241312513126131271312813129131301313113132131331313413135131361313713138131391314013141131421314313144131451314613147131481314913150131511315213153131541315513156131571315813159131601316113162131631316413165131661316713168131691317013171131721317313174131751317613177131781317913180131811318213183131841318513186131871318813189131901319113192131931319413195131961319713198131991320013201132021320313204132051320613207132081320913210132111321213213132141321513216132171321813219132201322113222132231322413225132261322713228132291323013231132321323313234132351323613237132381323913240132411324213243132441324513246132471324813249132501325113252132531325413255132561325713258132591326013261132621326313264132651326613267132681326913270132711327213273132741327513276132771327813279132801328113282132831328413285132861328713288132891329013291132921329313294132951329613297132981329913300133011330213303133041330513306133071330813309133101331113312133131331413315133161331713318133191332013321133221332313324133251332613327133281332913330133311333213333133341333513336133371333813339133401334113342133431334413345133461334713348133491335013351133521335313354133551335613357133581335913360133611336213363133641336513366133671336813369133701337113372133731337413375133761337713378133791338013381133821338313384133851338613387133881338913390133911339213393133941339513396133971339813399134001340113402134031340413405134061340713408134091341013411134121341313414134151341613417134181341913420134211
34221342313424134251342613427134281342913430134311343213433134341343513436134371343813439134401344113442134431344413445134461344713448134491345013451134521345313454134551345613457134581345913460134611346213463134641346513466134671346813469134701347113472134731347413475134761347713478134791348013481134821348313484134851348613487134881348913490134911349213493134941349513496134971349813499135001350113502135031350413505135061350713508135091351013511135121351313514135151351613517135181351913520135211352213523135241352513526135271352813529135301353113532135331353413535135361353713538135391354013541135421354313544135451354613547135481354913550135511355213553135541355513556135571355813559135601356113562135631356413565135661356713568135691357013571135721357313574135751357613577135781357913580135811358213583135841358513586135871358813589135901359113592135931359413595135961359713598135991360013601136021360313604136051360613607136081360913610136111361213613136141361513616136171361813619136201362113622136231362413625136261362713628136291363013631136321363313634136351363613637136381363913640136411364213643136441364513646136471364813649136501365113652136531365413655136561365713658136591366013661136621366313664136651366613667136681366913670136711367213673136741367513676136771367813679136801368113682136831368413685136861368713688136891369013691136921369313694136951369613697136981369913700137011370213703137041370513706137071370813709137101371113712137131371413715137161371713718137191372013721137221372313724137251372613727137281372913730137311373213733137341373513736137371373813739137401374113742137431374413745137461374713748137491375013751137521375313754137551375613757137581375913760137611376213763137641376513766137671376813769137701377113772137731377413775137761377713778137791378013781137821378313784137851378613787137881378913790137911379213793137941379513796137971379813799138001380113802138031380413805138061380713808138091381013811138121381313814138151381613817138181381913820138211
38221382313824138251382613827138281382913830138311383213833138341383513836138371383813839138401384113842138431384413845138461384713848138491385013851138521385313854138551385613857138581385913860138611386213863138641386513866138671386813869138701387113872138731387413875138761387713878138791388013881138821388313884138851388613887138881388913890138911389213893138941389513896138971389813899139001390113902139031390413905139061390713908139091391013911139121391313914139151391613917139181391913920139211392213923139241392513926139271392813929139301393113932139331393413935139361393713938139391394013941139421394313944139451394613947139481394913950139511395213953139541395513956139571395813959139601396113962139631396413965139661396713968139691397013971139721397313974139751397613977139781397913980139811398213983139841398513986139871398813989139901399113992139931399413995139961399713998139991400014001140021400314004140051400614007140081400914010140111401214013140141401514016140171401814019140201402114022140231402414025140261402714028140291403014031140321403314034140351403614037140381403914040140411404214043140441404514046140471404814049140501405114052140531405414055140561405714058140591406014061140621406314064140651406614067140681406914070140711407214073140741407514076140771407814079140801408114082140831408414085140861408714088140891409014091140921409314094140951409614097140981409914100141011410214103141041410514106141071410814109141101411114112141131411414115141161411714118141191412014121141221412314124141251412614127141281412914130141311413214133141341413514136141371413814139141401414114142141431414414145141461414714148141491415014151141521415314154141551415614157141581415914160141611416214163141641416514166141671416814169141701417114172141731417414175141761417714178141791418014181141821418314184141851418614187141881418914190141911419214193141941419514196141971419814199142001420114202142031420414205142061420714208142091421014211142121421314214142151421614217142181421914220142211
42221422314224142251422614227142281422914230142311423214233142341423514236142371423814239142401424114242142431424414245142461424714248142491425014251142521425314254142551425614257142581425914260142611426214263142641426514266142671426814269142701427114272142731427414275142761427714278142791428014281142821428314284142851428614287142881428914290142911429214293142941429514296142971429814299143001430114302143031430414305143061430714308143091431014311143121431314314143151431614317143181431914320143211432214323143241432514326143271432814329143301433114332143331433414335143361433714338143391434014341143421434314344143451434614347143481434914350143511435214353143541435514356143571435814359143601436114362143631436414365143661436714368143691437014371143721437314374143751437614377143781437914380143811438214383143841438514386143871438814389143901439114392143931439414395143961439714398143991440014401144021440314404144051440614407144081440914410144111441214413144141441514416144171441814419144201442114422144231442414425144261442714428144291443014431144321443314434144351443614437144381443914440144411444214443144441444514446144471444814449144501445114452144531445414455144561445714458144591446014461144621446314464144651446614467144681446914470144711447214473144741447514476144771447814479144801448114482144831448414485144861448714488144891449014491144921449314494144951449614497144981449914500145011450214503145041450514506145071450814509145101451114512145131451414515145161451714518145191452014521145221452314524145251452614527145281452914530145311453214533145341453514536145371453814539145401454114542145431454414545145461454714548145491455014551145521455314554145551455614557145581455914560145611456214563145641456514566145671456814569145701457114572145731457414575145761457714578145791458014581145821458314584145851458614587145881458914590145911459214593145941459514596145971459814599146001460114602146031460414605146061460714608146091461014611146121461314614146151461614617146181461914620146211
46221462314624146251462614627146281462914630146311463214633146341463514636146371463814639146401464114642146431464414645146461464714648146491465014651146521465314654146551465614657146581465914660146611466214663146641466514666146671466814669146701467114672146731467414675146761467714678146791468014681146821468314684146851468614687146881468914690146911469214693146941469514696146971469814699147001470114702147031470414705147061470714708147091471014711147121471314714147151471614717147181471914720147211472214723147241472514726147271472814729147301473114732147331473414735147361473714738147391474014741147421474314744147451474614747147481474914750147511475214753147541475514756147571475814759147601476114762147631476414765147661476714768147691477014771147721477314774147751477614777147781477914780147811478214783147841478514786147871478814789147901479114792147931479414795147961479714798147991480014801148021480314804148051480614807148081480914810148111481214813148141481514816148171481814819148201482114822148231482414825148261482714828148291483014831148321483314834148351483614837148381483914840148411484214843148441484514846148471484814849148501485114852148531485414855148561485714858148591486014861148621486314864148651486614867148681486914870148711487214873148741487514876148771487814879148801488114882148831488414885148861488714888148891489014891148921489314894148951489614897148981489914900149011490214903149041490514906149071490814909149101491114912149131491414915149161491714918149191492014921149221492314924149251492614927149281492914930149311493214933149341493514936149371493814939149401494114942149431494414945149461494714948149491495014951149521495314954149551495614957149581495914960149611496214963149641496514966149671496814969149701497114972149731497414975149761497714978149791498014981149821498314984149851498614987149881498914990149911499214993149941499514996149971499814999150001500115002150031500415005150061500715008150091501015011150121501315014150151501615017150181501915020150211
50221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211
5422154231542415425154261542715428154291543015431154321543315434154351543615437154381543915440154411544215443154441544515446154471544815449154501545115452154531545415455154561545715458154591546015461154621546315464154651546615467154681546915470154711547215473154741547515476154771547815479154801548115482154831548415485154861548715488154891549015491154921549315494154951549615497154981549915500155011550215503155041550515506155071550815509155101551115512155131551415515155161551715518155191552015521155221552315524155251552615527155281552915530155311553215533155341553515536155371553815539155401554115542155431554415545155461554715548155491555015551155521555315554155551555615557155581555915560155611556215563155641556515566155671556815569155701557115572155731557415575155761557715578155791558015581155821558315584155851558615587155881558915590155911559215593155941559515596155971559815599156001560115602156031560415605156061560715608156091561015611156121561315614156151561615617156181561915620156211562215623156241562515626156271562815629156301563115632156331563415635156361563715638156391564015641156421564315644156451564615647156481564915650156511565215653156541565515656156571565815659156601566115662156631566415665156661566715668156691567015671156721567315674156751567615677156781567915680156811568215683156841568515686156871568815689156901569115692156931569415695156961569715698156991570015701157021570315704157051570615707157081570915710157111571215713157141571515716157171571815719157201572115722157231572415725157261572715728157291573015731157321573315734157351573615737157381573915740157411574215743157441574515746157471574815749157501575115752157531575415755157561575715758157591576015761157621576315764157651576615767157681576915770157711577215773157741577515776157771577815779157801578115782157831578415785157861578715788157891579015791 |
- ; /* aes_gcm_asm.asm */
- ; /*
- ; * Copyright (C) 2006-2023 wolfSSL Inc.
- ; *
- ; * This file is part of wolfSSL.
- ; *
- ; * wolfSSL is free software; you can redistribute it and/or modify
- ; * it under the terms of the GNU General Public License as published by
- ; * the Free Software Foundation; either version 2 of the License, or
- ; * (at your option) any later version.
- ; *
- ; * wolfSSL is distributed in the hope that it will be useful,
- ; * but WITHOUT ANY WARRANTY; without even the implied warranty of
- ; * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- ; * GNU General Public License for more details.
- ; *
- ; * You should have received a copy of the GNU General Public License
- ; * along with this program; if not, write to the Free Software
- ; * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1335, USA
- ; */
- ; Build-time assembler/target feature detection for this file.
- ; Older MASM assemblers (@Version below 1200) do not recognize the
- ; AVX2 or MOVBE mnemonics, so both feature sets are disabled there
- ; unless the build already chose a setting.
- IF @Version LT 1200
- ; AVX2 instructions not recognized by old versions of MASM
- IFNDEF NO_AVX2_SUPPORT
- NO_AVX2_SUPPORT = 1
- ENDIF
- ; MOVBE instruction not recognized by old versions of MASM
- IFNDEF NO_MOVBE_SUPPORT
- NO_MOVBE_SUPPORT = 1
- ENDIF
- ENDIF
- ; AVX1 code paths are assembled by default unless overridden.
- IFNDEF HAVE_INTEL_AVX1
- HAVE_INTEL_AVX1 = 1
- ENDIF
- ; AVX2 code paths are assembled whenever they were not disabled above.
- IFNDEF NO_AVX2_SUPPORT
- HAVE_INTEL_AVX2 = 1
- ENDIF
- ; Target is Windows x64 (note the Microsoft x64 ABI register saves,
- ; e.g. rdi/rsi/r12-r15 pushes and xmm6-xmm15 spills in the code below).
- IFNDEF _WIN64
- _WIN64 = 1
- ENDIF
- ; ------------------------------------------------------------------
- ; Read-only 16-byte constants used by the AES-GCM routines below.
- ; Each constant lives in its own 16-byte-aligned _DATA segment and is
- ; paired with a ptr_* QWORD holding its address.
- ;
- ; L_aes_gcm_one .. L_aes_gcm_eight are {low qword = 0, high qword = N}:
- ; added with paddd to step the counter block by N (see the paddd uses
- ; against these labels in the encrypt code below).
- ; ------------------------------------------------------------------
- _DATA SEGMENT
- ALIGN 16
- L_aes_gcm_one QWORD 0, 1
- ptr_L_aes_gcm_one QWORD L_aes_gcm_one
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_aes_gcm_two QWORD 0, 2
- ptr_L_aes_gcm_two QWORD L_aes_gcm_two
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_aes_gcm_three QWORD 0, 3
- ptr_L_aes_gcm_three QWORD L_aes_gcm_three
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_aes_gcm_four QWORD 0, 4
- ptr_L_aes_gcm_four QWORD L_aes_gcm_four
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_aes_gcm_five QWORD 0, 5
- ptr_L_aes_gcm_five QWORD L_aes_gcm_five
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_aes_gcm_six QWORD 0, 6
- ptr_L_aes_gcm_six QWORD L_aes_gcm_six
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_aes_gcm_seven QWORD 0, 7
- ptr_L_aes_gcm_seven QWORD L_aes_gcm_seven
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_aes_gcm_eight QWORD 0, 8
- ptr_L_aes_gcm_eight QWORD L_aes_gcm_eight
- _DATA ENDS
- ; pshufb control {0x0001020304050607, 0x08090A0B0C0D0E0F}: in memory the
- ; control bytes are 07..00, 0F..08, i.e. byte-reverse each 64-bit half
- ; independently (used on the counter block).
- _DATA SEGMENT
- ALIGN 16
- L_aes_gcm_bswap_epi64 QWORD 283686952306183, 579005069656919567
- ptr_L_aes_gcm_bswap_epi64 QWORD L_aes_gcm_bswap_epi64
- _DATA ENDS
- ; pshufb control {0x08090A0B0C0D0E0F, 0x0001020304050607}: same bytes with
- ; the qwords swapped, i.e. byte-reverse the full 128-bit lane (used to
- ; move data between little-endian and GHASH big-endian byte order).
- _DATA SEGMENT
- ALIGN 16
- L_aes_gcm_bswap_mask QWORD 579005069656919567, 283686952306183
- ptr_L_aes_gcm_bswap_mask QWORD L_aes_gcm_bswap_mask
- _DATA ENDS
- ; {1, 0xC200000000000000}: GF(2^128) reduction constant for the GHASH
- ; carry-less multiply (applied with pand during hash-key derivation;
- ; presumably the standard GCM polynomial reduction value).
- _DATA SEGMENT
- ALIGN 16
- L_aes_gcm_mod2_128 QWORD 1, 13979173243358019584
- ptr_L_aes_gcm_mod2_128 QWORD L_aes_gcm_mod2_128
- _DATA ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_encrypt_aesni PROC
- push r13
- push rdi
- push rsi
- push r12
- push rbx
- push r14
- push r15
- mov rdi, rcx
- mov rsi, rdx
- mov r12, r8
- mov rax, r9
- mov r8, QWORD PTR [rsp+96]
- mov r9d, DWORD PTR [rsp+104]
- mov r11d, DWORD PTR [rsp+112]
- mov ebx, DWORD PTR [rsp+120]
- mov r14d, DWORD PTR [rsp+128]
- mov r15, QWORD PTR [rsp+136]
- mov r10d, DWORD PTR [rsp+144]
- sub rsp, 320
- movdqu [rsp+160], xmm6
- movdqu [rsp+176], xmm7
- movdqu [rsp+192], xmm8
- movdqu [rsp+208], xmm9
- movdqu [rsp+224], xmm10
- movdqu [rsp+240], xmm11
- movdqu [rsp+256], xmm12
- movdqu [rsp+272], xmm13
- movdqu [rsp+288], xmm14
- movdqu [rsp+304], xmm15
- pxor xmm4, xmm4
- pxor xmm6, xmm6
- cmp ebx, 12
- mov edx, ebx
- jne L_AES_GCM_encrypt_aesni_iv_not_12
- ; # Calculate values when IV is 12 bytes
- ; Set counter based on IV
- mov ecx, 16777216
- pinsrq xmm4, QWORD PTR [rax], 0
- pinsrd xmm4, DWORD PTR [rax+8], 2
- pinsrd xmm4, ecx, 3
- ; H = Encrypt X(=0) and T = Encrypt counter
- movdqa xmm1, xmm4
- movdqa xmm5, OWORD PTR [r15]
- pxor xmm1, xmm5
- movdqa xmm7, OWORD PTR [r15+16]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+32]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+48]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+64]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+80]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+96]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+112]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+128]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+144]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- cmp r10d, 11
- movdqa xmm7, OWORD PTR [r15+160]
- jl L_AES_GCM_encrypt_aesni_calc_iv_12_last
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+176]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- cmp r10d, 13
- movdqa xmm7, OWORD PTR [r15+192]
- jl L_AES_GCM_encrypt_aesni_calc_iv_12_last
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+208]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+224]
- L_AES_GCM_encrypt_aesni_calc_iv_12_last:
- aesenclast xmm5, xmm7
- aesenclast xmm1, xmm7
- pshufb xmm5, OWORD PTR L_aes_gcm_bswap_mask
- movdqu [rsp+144], xmm1
- jmp L_AES_GCM_encrypt_aesni_iv_done
- L_AES_GCM_encrypt_aesni_iv_not_12:
- ; Calculate values when IV is not 12 bytes
- ; H = Encrypt X(=0)
- movdqa xmm5, OWORD PTR [r15]
- aesenc xmm5, [r15+16]
- aesenc xmm5, [r15+32]
- aesenc xmm5, [r15+48]
- aesenc xmm5, [r15+64]
- aesenc xmm5, [r15+80]
- aesenc xmm5, [r15+96]
- aesenc xmm5, [r15+112]
- aesenc xmm5, [r15+128]
- aesenc xmm5, [r15+144]
- cmp r10d, 11
- movdqa xmm9, OWORD PTR [r15+160]
- jl L_AES_GCM_encrypt_aesni_calc_iv_1_aesenc_avx_last
- aesenc xmm5, xmm9
- aesenc xmm5, [r15+176]
- cmp r10d, 13
- movdqa xmm9, OWORD PTR [r15+192]
- jl L_AES_GCM_encrypt_aesni_calc_iv_1_aesenc_avx_last
- aesenc xmm5, xmm9
- aesenc xmm5, [r15+208]
- movdqa xmm9, OWORD PTR [r15+224]
- L_AES_GCM_encrypt_aesni_calc_iv_1_aesenc_avx_last:
- aesenclast xmm5, xmm9
- pshufb xmm5, OWORD PTR L_aes_gcm_bswap_mask
- ; Calc counter
- ; Initialization vector
- cmp edx, 0
- mov rcx, 0
- je L_AES_GCM_encrypt_aesni_calc_iv_done
- cmp edx, 16
- jl L_AES_GCM_encrypt_aesni_calc_iv_lt16
- and edx, 4294967280
- L_AES_GCM_encrypt_aesni_calc_iv_16_loop:
- movdqu xmm8, [rax+rcx]
- pshufb xmm8, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm4, xmm8
- pshufd xmm1, xmm4, 78
- pshufd xmm2, xmm5, 78
- movdqa xmm3, xmm5
- movdqa xmm0, xmm5
- pclmulqdq xmm3, xmm4, 17
- pclmulqdq xmm0, xmm4, 0
- pxor xmm1, xmm4
- pxor xmm2, xmm5
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- movdqa xmm7, xmm0
- movdqa xmm4, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm7, xmm2
- pxor xmm4, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm4
- psrld xmm0, 31
- psrld xmm1, 31
- pslld xmm7, 1
- pslld xmm4, 1
- movdqa xmm2, xmm0
- pslldq xmm0, 4
- psrldq xmm2, 12
- pslldq xmm1, 4
- por xmm4, xmm2
- por xmm7, xmm0
- por xmm4, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm7
- movdqa xmm2, xmm7
- pslld xmm0, 31
- pslld xmm1, 30
- pslld xmm2, 25
- pxor xmm0, xmm1
- pxor xmm0, xmm2
- movdqa xmm1, xmm0
- psrldq xmm1, 4
- pslldq xmm0, 12
- pxor xmm7, xmm0
- movdqa xmm2, xmm7
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- psrld xmm2, 1
- psrld xmm3, 2
- psrld xmm0, 7
- pxor xmm2, xmm3
- pxor xmm2, xmm0
- pxor xmm2, xmm1
- pxor xmm2, xmm7
- pxor xmm4, xmm2
- add ecx, 16
- cmp ecx, edx
- jl L_AES_GCM_encrypt_aesni_calc_iv_16_loop
- mov edx, ebx
- cmp ecx, edx
- je L_AES_GCM_encrypt_aesni_calc_iv_done
- L_AES_GCM_encrypt_aesni_calc_iv_lt16:
- sub rsp, 16
- pxor xmm8, xmm8
- xor ebx, ebx
- movdqu [rsp], xmm8
- L_AES_GCM_encrypt_aesni_calc_iv_loop:
- movzx r13d, BYTE PTR [rax+rcx]
- mov BYTE PTR [rsp+rbx], r13b
- inc ecx
- inc ebx
- cmp ecx, edx
- jl L_AES_GCM_encrypt_aesni_calc_iv_loop
- movdqu xmm8, [rsp]
- add rsp, 16
- pshufb xmm8, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm4, xmm8
- pshufd xmm1, xmm4, 78
- pshufd xmm2, xmm5, 78
- movdqa xmm3, xmm5
- movdqa xmm0, xmm5
- pclmulqdq xmm3, xmm4, 17
- pclmulqdq xmm0, xmm4, 0
- pxor xmm1, xmm4
- pxor xmm2, xmm5
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- movdqa xmm7, xmm0
- movdqa xmm4, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm7, xmm2
- pxor xmm4, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm4
- psrld xmm0, 31
- psrld xmm1, 31
- pslld xmm7, 1
- pslld xmm4, 1
- movdqa xmm2, xmm0
- pslldq xmm0, 4
- psrldq xmm2, 12
- pslldq xmm1, 4
- por xmm4, xmm2
- por xmm7, xmm0
- por xmm4, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm7
- movdqa xmm2, xmm7
- pslld xmm0, 31
- pslld xmm1, 30
- pslld xmm2, 25
- pxor xmm0, xmm1
- pxor xmm0, xmm2
- movdqa xmm1, xmm0
- psrldq xmm1, 4
- pslldq xmm0, 12
- pxor xmm7, xmm0
- movdqa xmm2, xmm7
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- psrld xmm2, 1
- psrld xmm3, 2
- psrld xmm0, 7
- pxor xmm2, xmm3
- pxor xmm2, xmm0
- pxor xmm2, xmm1
- pxor xmm2, xmm7
- pxor xmm4, xmm2
- L_AES_GCM_encrypt_aesni_calc_iv_done:
- ; T = Encrypt counter
- pxor xmm0, xmm0
- shl edx, 3
- pinsrq xmm0, rdx, 0
- pxor xmm4, xmm0
- pshufd xmm1, xmm4, 78
- pshufd xmm2, xmm5, 78
- movdqa xmm3, xmm5
- movdqa xmm0, xmm5
- pclmulqdq xmm3, xmm4, 17
- pclmulqdq xmm0, xmm4, 0
- pxor xmm1, xmm4
- pxor xmm2, xmm5
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- movdqa xmm7, xmm0
- movdqa xmm4, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm7, xmm2
- pxor xmm4, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm4
- psrld xmm0, 31
- psrld xmm1, 31
- pslld xmm7, 1
- pslld xmm4, 1
- movdqa xmm2, xmm0
- pslldq xmm0, 4
- psrldq xmm2, 12
- pslldq xmm1, 4
- por xmm4, xmm2
- por xmm7, xmm0
- por xmm4, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm7
- movdqa xmm2, xmm7
- pslld xmm0, 31
- pslld xmm1, 30
- pslld xmm2, 25
- pxor xmm0, xmm1
- pxor xmm0, xmm2
- movdqa xmm1, xmm0
- psrldq xmm1, 4
- pslldq xmm0, 12
- pxor xmm7, xmm0
- movdqa xmm2, xmm7
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- psrld xmm2, 1
- psrld xmm3, 2
- psrld xmm0, 7
- pxor xmm2, xmm3
- pxor xmm2, xmm0
- pxor xmm2, xmm1
- pxor xmm2, xmm7
- pxor xmm4, xmm2
- pshufb xmm4, OWORD PTR L_aes_gcm_bswap_mask
- ; Encrypt counter
- movdqa xmm8, OWORD PTR [r15]
- pxor xmm8, xmm4
- aesenc xmm8, [r15+16]
- aesenc xmm8, [r15+32]
- aesenc xmm8, [r15+48]
- aesenc xmm8, [r15+64]
- aesenc xmm8, [r15+80]
- aesenc xmm8, [r15+96]
- aesenc xmm8, [r15+112]
- aesenc xmm8, [r15+128]
- aesenc xmm8, [r15+144]
- cmp r10d, 11
- movdqa xmm9, OWORD PTR [r15+160]
- jl L_AES_GCM_encrypt_aesni_calc_iv_2_aesenc_avx_last
- aesenc xmm8, xmm9
- aesenc xmm8, [r15+176]
- cmp r10d, 13
- movdqa xmm9, OWORD PTR [r15+192]
- jl L_AES_GCM_encrypt_aesni_calc_iv_2_aesenc_avx_last
- aesenc xmm8, xmm9
- aesenc xmm8, [r15+208]
- movdqa xmm9, OWORD PTR [r15+224]
- L_AES_GCM_encrypt_aesni_calc_iv_2_aesenc_avx_last:
- aesenclast xmm8, xmm9
- movdqu [rsp+144], xmm8
- L_AES_GCM_encrypt_aesni_iv_done:
- ; Additional authentication data
- mov edx, r11d
- cmp edx, 0
- je L_AES_GCM_encrypt_aesni_calc_aad_done
- xor ecx, ecx
- cmp edx, 16
- jl L_AES_GCM_encrypt_aesni_calc_aad_lt16
- and edx, 4294967280
- L_AES_GCM_encrypt_aesni_calc_aad_16_loop:
- movdqu xmm8, [r12+rcx]
- pshufb xmm8, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm6, xmm8
- pshufd xmm1, xmm6, 78
- pshufd xmm2, xmm5, 78
- movdqa xmm3, xmm5
- movdqa xmm0, xmm5
- pclmulqdq xmm3, xmm6, 17
- pclmulqdq xmm0, xmm6, 0
- pxor xmm1, xmm6
- pxor xmm2, xmm5
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- movdqa xmm7, xmm0
- movdqa xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm7, xmm2
- pxor xmm6, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm6
- psrld xmm0, 31
- psrld xmm1, 31
- pslld xmm7, 1
- pslld xmm6, 1
- movdqa xmm2, xmm0
- pslldq xmm0, 4
- psrldq xmm2, 12
- pslldq xmm1, 4
- por xmm6, xmm2
- por xmm7, xmm0
- por xmm6, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm7
- movdqa xmm2, xmm7
- pslld xmm0, 31
- pslld xmm1, 30
- pslld xmm2, 25
- pxor xmm0, xmm1
- pxor xmm0, xmm2
- movdqa xmm1, xmm0
- psrldq xmm1, 4
- pslldq xmm0, 12
- pxor xmm7, xmm0
- movdqa xmm2, xmm7
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- psrld xmm2, 1
- psrld xmm3, 2
- psrld xmm0, 7
- pxor xmm2, xmm3
- pxor xmm2, xmm0
- pxor xmm2, xmm1
- pxor xmm2, xmm7
- pxor xmm6, xmm2
- add ecx, 16
- cmp ecx, edx
- jl L_AES_GCM_encrypt_aesni_calc_aad_16_loop
- mov edx, r11d
- cmp ecx, edx
- je L_AES_GCM_encrypt_aesni_calc_aad_done
- L_AES_GCM_encrypt_aesni_calc_aad_lt16:
- sub rsp, 16
- pxor xmm8, xmm8
- xor ebx, ebx
- movdqu [rsp], xmm8
- L_AES_GCM_encrypt_aesni_calc_aad_loop:
- movzx r13d, BYTE PTR [r12+rcx]
- mov BYTE PTR [rsp+rbx], r13b
- inc ecx
- inc ebx
- cmp ecx, edx
- jl L_AES_GCM_encrypt_aesni_calc_aad_loop
- movdqu xmm8, [rsp]
- add rsp, 16
- pshufb xmm8, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm6, xmm8
- pshufd xmm1, xmm6, 78
- pshufd xmm2, xmm5, 78
- movdqa xmm3, xmm5
- movdqa xmm0, xmm5
- pclmulqdq xmm3, xmm6, 17
- pclmulqdq xmm0, xmm6, 0
- pxor xmm1, xmm6
- pxor xmm2, xmm5
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- movdqa xmm7, xmm0
- movdqa xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm7, xmm2
- pxor xmm6, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm6
- psrld xmm0, 31
- psrld xmm1, 31
- pslld xmm7, 1
- pslld xmm6, 1
- movdqa xmm2, xmm0
- pslldq xmm0, 4
- psrldq xmm2, 12
- pslldq xmm1, 4
- por xmm6, xmm2
- por xmm7, xmm0
- por xmm6, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm7
- movdqa xmm2, xmm7
- pslld xmm0, 31
- pslld xmm1, 30
- pslld xmm2, 25
- pxor xmm0, xmm1
- pxor xmm0, xmm2
- movdqa xmm1, xmm0
- psrldq xmm1, 4
- pslldq xmm0, 12
- pxor xmm7, xmm0
- movdqa xmm2, xmm7
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- psrld xmm2, 1
- psrld xmm3, 2
- psrld xmm0, 7
- pxor xmm2, xmm3
- pxor xmm2, xmm0
- pxor xmm2, xmm1
- pxor xmm2, xmm7
- pxor xmm6, xmm2
- L_AES_GCM_encrypt_aesni_calc_aad_done:
- ; Calculate counter and H
- pshufb xmm4, OWORD PTR L_aes_gcm_bswap_epi64
- movdqa xmm9, xmm5
- paddd xmm4, OWORD PTR L_aes_gcm_one
- movdqa xmm8, xmm5
- movdqu [rsp+128], xmm4
- psrlq xmm9, 63
- psllq xmm8, 1
- pslldq xmm9, 8
- por xmm8, xmm9
- pshufd xmm5, xmm5, 255
- psrad xmm5, 31
- pand xmm5, OWORD PTR L_aes_gcm_mod2_128
- pxor xmm5, xmm8
- xor rbx, rbx
- cmp r9d, 128
- mov r13d, r9d
- jl L_AES_GCM_encrypt_aesni_done_128
- and r13d, 4294967168
- movdqa xmm2, xmm6
- ; H ^ 1
- movdqu [rsp], xmm5
- ; H ^ 2
- pshufd xmm9, xmm5, 78
- pshufd xmm10, xmm5, 78
- movdqa xmm11, xmm5
- movdqa xmm8, xmm5
- pclmulqdq xmm11, xmm5, 17
- pclmulqdq xmm8, xmm5, 0
- pxor xmm9, xmm5
- pxor xmm10, xmm5
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm0, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm0, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm0, xmm14
- movdqu [rsp+16], xmm0
- ; H ^ 3
- pshufd xmm9, xmm5, 78
- pshufd xmm10, xmm0, 78
- movdqa xmm11, xmm0
- movdqa xmm8, xmm0
- pclmulqdq xmm11, xmm5, 17
- pclmulqdq xmm8, xmm5, 0
- pxor xmm9, xmm5
- pxor xmm10, xmm0
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm1, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm1, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm1, xmm14
- movdqu [rsp+32], xmm1
- ; H ^ 4
- pshufd xmm9, xmm0, 78
- pshufd xmm10, xmm0, 78
- movdqa xmm11, xmm0
- movdqa xmm8, xmm0
- pclmulqdq xmm11, xmm0, 17
- pclmulqdq xmm8, xmm0, 0
- pxor xmm9, xmm0
- pxor xmm10, xmm0
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm3, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm3, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm3, xmm14
- movdqu [rsp+48], xmm3
- ; H ^ 5
- pshufd xmm9, xmm0, 78
- pshufd xmm10, xmm1, 78
- movdqa xmm11, xmm1
- movdqa xmm8, xmm1
- pclmulqdq xmm11, xmm0, 17
- pclmulqdq xmm8, xmm0, 0
- pxor xmm9, xmm0
- pxor xmm10, xmm1
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm7, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm7, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm7, xmm14
- movdqu [rsp+64], xmm7
- ; H ^ 6
- pshufd xmm9, xmm1, 78
- pshufd xmm10, xmm1, 78
- movdqa xmm11, xmm1
- movdqa xmm8, xmm1
- pclmulqdq xmm11, xmm1, 17
- pclmulqdq xmm8, xmm1, 0
- pxor xmm9, xmm1
- pxor xmm10, xmm1
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm7, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm7, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm7, xmm14
- movdqu [rsp+80], xmm7
- ; H ^ 7
- pshufd xmm9, xmm1, 78
- pshufd xmm10, xmm3, 78
- movdqa xmm11, xmm3
- movdqa xmm8, xmm3
- pclmulqdq xmm11, xmm1, 17
- pclmulqdq xmm8, xmm1, 0
- pxor xmm9, xmm1
- pxor xmm10, xmm3
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm7, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm7, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm7, xmm14
- movdqu [rsp+96], xmm7
- ; H ^ 8
- pshufd xmm9, xmm3, 78
- pshufd xmm10, xmm3, 78
- movdqa xmm11, xmm3
- movdqa xmm8, xmm3
- pclmulqdq xmm11, xmm3, 17
- pclmulqdq xmm8, xmm3, 0
- pxor xmm9, xmm3
- pxor xmm10, xmm3
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm7, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm7, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm7, xmm14
- movdqu [rsp+112], xmm7
- ; First 128 bytes of input
- movdqu xmm8, [rsp+128]
- movdqa xmm1, OWORD PTR L_aes_gcm_bswap_epi64
- movdqa xmm0, xmm8
- pshufb xmm8, xmm1
- movdqa xmm9, xmm0
- paddd xmm9, OWORD PTR L_aes_gcm_one
- pshufb xmm9, xmm1
- movdqa xmm10, xmm0
- paddd xmm10, OWORD PTR L_aes_gcm_two
- pshufb xmm10, xmm1
- movdqa xmm11, xmm0
- paddd xmm11, OWORD PTR L_aes_gcm_three
- pshufb xmm11, xmm1
- movdqa xmm12, xmm0
- paddd xmm12, OWORD PTR L_aes_gcm_four
- pshufb xmm12, xmm1
- movdqa xmm13, xmm0
- paddd xmm13, OWORD PTR L_aes_gcm_five
- pshufb xmm13, xmm1
- movdqa xmm14, xmm0
- paddd xmm14, OWORD PTR L_aes_gcm_six
- pshufb xmm14, xmm1
- movdqa xmm15, xmm0
- paddd xmm15, OWORD PTR L_aes_gcm_seven
- pshufb xmm15, xmm1
- paddd xmm0, OWORD PTR L_aes_gcm_eight
- movdqa xmm7, OWORD PTR [r15]
- movdqu [rsp+128], xmm0
- pxor xmm8, xmm7
- pxor xmm9, xmm7
- pxor xmm10, xmm7
- pxor xmm11, xmm7
- pxor xmm12, xmm7
- pxor xmm13, xmm7
- pxor xmm14, xmm7
- pxor xmm15, xmm7
- movdqa xmm7, OWORD PTR [r15+16]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [r15+32]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [r15+48]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [r15+64]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [r15+80]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [r15+96]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [r15+112]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [r15+128]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [r15+144]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- cmp r10d, 11
- movdqa xmm7, OWORD PTR [r15+160]
- jl L_AES_GCM_encrypt_aesni_enc_done
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [r15+176]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- cmp r10d, 13
- movdqa xmm7, OWORD PTR [r15+192]
- jl L_AES_GCM_encrypt_aesni_enc_done
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [r15+208]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [r15+224]
- L_AES_GCM_encrypt_aesni_enc_done:
- aesenclast xmm8, xmm7
- aesenclast xmm9, xmm7
- movdqu xmm0, [rdi]
- movdqu xmm1, [rdi+16]
- pxor xmm8, xmm0
- pxor xmm9, xmm1
- movdqu [rsi], xmm8
- movdqu [rsi+16], xmm9
- aesenclast xmm10, xmm7
- aesenclast xmm11, xmm7
- movdqu xmm0, [rdi+32]
- movdqu xmm1, [rdi+48]
- pxor xmm10, xmm0
- pxor xmm11, xmm1
- movdqu [rsi+32], xmm10
- movdqu [rsi+48], xmm11
- aesenclast xmm12, xmm7
- aesenclast xmm13, xmm7
- movdqu xmm0, [rdi+64]
- movdqu xmm1, [rdi+80]
- pxor xmm12, xmm0
- pxor xmm13, xmm1
- movdqu [rsi+64], xmm12
- movdqu [rsi+80], xmm13
- aesenclast xmm14, xmm7
- aesenclast xmm15, xmm7
- movdqu xmm0, [rdi+96]
- movdqu xmm1, [rdi+112]
- pxor xmm14, xmm0
- pxor xmm15, xmm1
- movdqu [rsi+96], xmm14
- movdqu [rsi+112], xmm15
- cmp r13d, 128
- mov ebx, 128
- jle L_AES_GCM_encrypt_aesni_end_128
- ; More 128 bytes of input
- L_AES_GCM_encrypt_aesni_ghash_128:
- lea rcx, QWORD PTR [rdi+rbx]
- lea rdx, QWORD PTR [rsi+rbx]
- movdqu xmm8, [rsp+128]
- movdqa xmm1, OWORD PTR L_aes_gcm_bswap_epi64
- movdqa xmm0, xmm8
- pshufb xmm8, xmm1
- movdqa xmm9, xmm0
- paddd xmm9, OWORD PTR L_aes_gcm_one
- pshufb xmm9, xmm1
- movdqa xmm10, xmm0
- paddd xmm10, OWORD PTR L_aes_gcm_two
- pshufb xmm10, xmm1
- movdqa xmm11, xmm0
- paddd xmm11, OWORD PTR L_aes_gcm_three
- pshufb xmm11, xmm1
- movdqa xmm12, xmm0
- paddd xmm12, OWORD PTR L_aes_gcm_four
- pshufb xmm12, xmm1
- movdqa xmm13, xmm0
- paddd xmm13, OWORD PTR L_aes_gcm_five
- pshufb xmm13, xmm1
- movdqa xmm14, xmm0
- paddd xmm14, OWORD PTR L_aes_gcm_six
- pshufb xmm14, xmm1
- movdqa xmm15, xmm0
- paddd xmm15, OWORD PTR L_aes_gcm_seven
- pshufb xmm15, xmm1
- paddd xmm0, OWORD PTR L_aes_gcm_eight
- movdqa xmm7, OWORD PTR [r15]
- movdqu [rsp+128], xmm0
- pxor xmm8, xmm7
- pxor xmm9, xmm7
- pxor xmm10, xmm7
- pxor xmm11, xmm7
- pxor xmm12, xmm7
- pxor xmm13, xmm7
- pxor xmm14, xmm7
- pxor xmm15, xmm7
- movdqu xmm7, [rsp+112]
- movdqu xmm0, [rdx+-128]
- aesenc xmm8, [r15+16]
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm0, xmm2
- pshufd xmm1, xmm7, 78
- pshufd xmm5, xmm0, 78
- pxor xmm1, xmm7
- pxor xmm5, xmm0
- movdqa xmm3, xmm0
- pclmulqdq xmm3, xmm7, 17
- aesenc xmm9, [r15+16]
- aesenc xmm10, [r15+16]
- movdqa xmm2, xmm0
- pclmulqdq xmm2, xmm7, 0
- aesenc xmm11, [r15+16]
- aesenc xmm12, [r15+16]
- pclmulqdq xmm1, xmm5, 0
- aesenc xmm13, [r15+16]
- aesenc xmm14, [r15+16]
- aesenc xmm15, [r15+16]
- pxor xmm1, xmm2
- pxor xmm1, xmm3
- movdqu xmm7, [rsp+96]
- movdqu xmm0, [rdx+-112]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [r15+32]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [r15+32]
- aesenc xmm10, [r15+32]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [r15+32]
- aesenc xmm12, [r15+32]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [r15+32]
- aesenc xmm14, [r15+32]
- aesenc xmm15, [r15+32]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+80]
- movdqu xmm0, [rdx+-96]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [r15+48]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [r15+48]
- aesenc xmm10, [r15+48]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [r15+48]
- aesenc xmm12, [r15+48]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [r15+48]
- aesenc xmm14, [r15+48]
- aesenc xmm15, [r15+48]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+64]
- movdqu xmm0, [rdx+-80]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [r15+64]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [r15+64]
- aesenc xmm10, [r15+64]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [r15+64]
- aesenc xmm12, [r15+64]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [r15+64]
- aesenc xmm14, [r15+64]
- aesenc xmm15, [r15+64]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+48]
- movdqu xmm0, [rdx+-64]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [r15+80]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [r15+80]
- aesenc xmm10, [r15+80]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [r15+80]
- aesenc xmm12, [r15+80]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [r15+80]
- aesenc xmm14, [r15+80]
- aesenc xmm15, [r15+80]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+32]
- movdqu xmm0, [rdx+-48]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [r15+96]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [r15+96]
- aesenc xmm10, [r15+96]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [r15+96]
- aesenc xmm12, [r15+96]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [r15+96]
- aesenc xmm14, [r15+96]
- aesenc xmm15, [r15+96]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+16]
- movdqu xmm0, [rdx+-32]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [r15+112]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [r15+112]
- aesenc xmm10, [r15+112]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [r15+112]
- aesenc xmm12, [r15+112]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [r15+112]
- aesenc xmm14, [r15+112]
- aesenc xmm15, [r15+112]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp]
- movdqu xmm0, [rdx+-16]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [r15+128]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [r15+128]
- aesenc xmm10, [r15+128]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [r15+128]
- aesenc xmm12, [r15+128]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [r15+128]
- aesenc xmm14, [r15+128]
- aesenc xmm15, [r15+128]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqa xmm5, xmm1
- psrldq xmm1, 8
- pslldq xmm5, 8
- aesenc xmm8, [r15+144]
- pxor xmm2, xmm5
- pxor xmm3, xmm1
- movdqa xmm7, xmm2
- movdqa xmm4, xmm2
- movdqa xmm5, xmm2
- aesenc xmm9, [r15+144]
- pslld xmm7, 31
- pslld xmm4, 30
- pslld xmm5, 25
- aesenc xmm10, [r15+144]
- pxor xmm7, xmm4
- pxor xmm7, xmm5
- aesenc xmm11, [r15+144]
- movdqa xmm4, xmm7
- pslldq xmm7, 12
- psrldq xmm4, 4
- aesenc xmm12, [r15+144]
- pxor xmm2, xmm7
- movdqa xmm5, xmm2
- movdqa xmm1, xmm2
- movdqa xmm0, xmm2
- aesenc xmm13, [r15+144]
- psrld xmm5, 1
- psrld xmm1, 2
- psrld xmm0, 7
- aesenc xmm14, [r15+144]
- pxor xmm5, xmm1
- pxor xmm5, xmm0
- aesenc xmm15, [r15+144]
- pxor xmm5, xmm4
- pxor xmm2, xmm5
- pxor xmm2, xmm3
- cmp r10d, 11
- movdqa xmm7, OWORD PTR [r15+160]
- jl L_AES_GCM_encrypt_aesni_aesenc_128_ghash_avx_done
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [r15+176]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- cmp r10d, 13
- movdqa xmm7, OWORD PTR [r15+192]
- jl L_AES_GCM_encrypt_aesni_aesenc_128_ghash_avx_done
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [r15+208]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [r15+224]
- L_AES_GCM_encrypt_aesni_aesenc_128_ghash_avx_done:
- aesenclast xmm8, xmm7
- aesenclast xmm9, xmm7
- movdqu xmm0, [rcx]
- movdqu xmm1, [rcx+16]
- pxor xmm8, xmm0
- pxor xmm9, xmm1
- movdqu [rdx], xmm8
- movdqu [rdx+16], xmm9
- aesenclast xmm10, xmm7
- aesenclast xmm11, xmm7
- movdqu xmm0, [rcx+32]
- movdqu xmm1, [rcx+48]
- pxor xmm10, xmm0
- pxor xmm11, xmm1
- movdqu [rdx+32], xmm10
- movdqu [rdx+48], xmm11
- aesenclast xmm12, xmm7
- aesenclast xmm13, xmm7
- movdqu xmm0, [rcx+64]
- movdqu xmm1, [rcx+80]
- pxor xmm12, xmm0
- pxor xmm13, xmm1
- movdqu [rdx+64], xmm12
- movdqu [rdx+80], xmm13
- aesenclast xmm14, xmm7
- aesenclast xmm15, xmm7
- movdqu xmm0, [rcx+96]
- movdqu xmm1, [rcx+112]
- pxor xmm14, xmm0
- pxor xmm15, xmm1
- movdqu [rdx+96], xmm14
- movdqu [rdx+112], xmm15
- add ebx, 128
- cmp ebx, r13d
- jl L_AES_GCM_encrypt_aesni_ghash_128
- L_AES_GCM_encrypt_aesni_end_128:
- movdqa xmm4, OWORD PTR L_aes_gcm_bswap_mask
- pshufb xmm8, xmm4
- pshufb xmm9, xmm4
- pshufb xmm10, xmm4
- pshufb xmm11, xmm4
- pxor xmm8, xmm2
- pshufb xmm12, xmm4
- pshufb xmm13, xmm4
- pshufb xmm14, xmm4
- pshufb xmm15, xmm4
- movdqu xmm7, [rsp+112]
- pshufd xmm1, xmm8, 78
- pshufd xmm2, xmm7, 78
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- pclmulqdq xmm3, xmm8, 17
- pclmulqdq xmm0, xmm8, 0
- pxor xmm1, xmm8
- pxor xmm2, xmm7
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- movdqa xmm4, xmm0
- movdqa xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm4, xmm2
- pxor xmm6, xmm1
- movdqu xmm7, [rsp+96]
- pshufd xmm1, xmm9, 78
- pshufd xmm2, xmm7, 78
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- pclmulqdq xmm3, xmm9, 17
- pclmulqdq xmm0, xmm9, 0
- pxor xmm1, xmm9
- pxor xmm2, xmm7
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- pxor xmm4, xmm0
- pxor xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm4, xmm2
- pxor xmm6, xmm1
- movdqu xmm7, [rsp+80]
- pshufd xmm1, xmm10, 78
- pshufd xmm2, xmm7, 78
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- pclmulqdq xmm3, xmm10, 17
- pclmulqdq xmm0, xmm10, 0
- pxor xmm1, xmm10
- pxor xmm2, xmm7
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- pxor xmm4, xmm0
- pxor xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm4, xmm2
- pxor xmm6, xmm1
- movdqu xmm7, [rsp+64]
- pshufd xmm1, xmm11, 78
- pshufd xmm2, xmm7, 78
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- pclmulqdq xmm3, xmm11, 17
- pclmulqdq xmm0, xmm11, 0
- pxor xmm1, xmm11
- pxor xmm2, xmm7
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- pxor xmm4, xmm0
- pxor xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm4, xmm2
- pxor xmm6, xmm1
- movdqu xmm7, [rsp+48]
- pshufd xmm1, xmm12, 78
- pshufd xmm2, xmm7, 78
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- pclmulqdq xmm3, xmm12, 17
- pclmulqdq xmm0, xmm12, 0
- pxor xmm1, xmm12
- pxor xmm2, xmm7
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- pxor xmm4, xmm0
- pxor xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm4, xmm2
- pxor xmm6, xmm1
- movdqu xmm7, [rsp+32]
- pshufd xmm1, xmm13, 78
- pshufd xmm2, xmm7, 78
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- pclmulqdq xmm3, xmm13, 17
- pclmulqdq xmm0, xmm13, 0
- pxor xmm1, xmm13
- pxor xmm2, xmm7
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- pxor xmm4, xmm0
- pxor xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm4, xmm2
- pxor xmm6, xmm1
- movdqu xmm7, [rsp+16]
- pshufd xmm1, xmm14, 78
- pshufd xmm2, xmm7, 78
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- pclmulqdq xmm3, xmm14, 17
- pclmulqdq xmm0, xmm14, 0
- pxor xmm1, xmm14
- pxor xmm2, xmm7
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- pxor xmm4, xmm0
- pxor xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm4, xmm2
- pxor xmm6, xmm1
- movdqu xmm7, [rsp]
- pshufd xmm1, xmm15, 78
- pshufd xmm2, xmm7, 78
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- pclmulqdq xmm3, xmm15, 17
- pclmulqdq xmm0, xmm15, 0
- pxor xmm1, xmm15
- pxor xmm2, xmm7
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- pxor xmm4, xmm0
- pxor xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm4, xmm2
- pxor xmm6, xmm1
- movdqa xmm0, xmm4
- movdqa xmm1, xmm4
- movdqa xmm2, xmm4
- pslld xmm0, 31
- pslld xmm1, 30
- pslld xmm2, 25
- pxor xmm0, xmm1
- pxor xmm0, xmm2
- movdqa xmm1, xmm0
- psrldq xmm1, 4
- pslldq xmm0, 12
- pxor xmm4, xmm0
- movdqa xmm2, xmm4
- movdqa xmm3, xmm4
- movdqa xmm0, xmm4
- psrld xmm2, 1
- psrld xmm3, 2
- psrld xmm0, 7
- pxor xmm2, xmm3
- pxor xmm2, xmm0
- pxor xmm2, xmm1
- pxor xmm2, xmm4
- pxor xmm6, xmm2
- movdqu xmm5, [rsp]
- L_AES_GCM_encrypt_aesni_done_128:
- mov edx, r9d
- cmp ebx, edx
- jge L_AES_GCM_encrypt_aesni_done_enc
- mov r13d, r9d
- and r13d, 4294967280
- cmp ebx, r13d
- jge L_AES_GCM_encrypt_aesni_last_block_done
- lea rcx, QWORD PTR [rdi+rbx]
- lea rdx, QWORD PTR [rsi+rbx]
- movdqu xmm8, [rsp+128]
- movdqa xmm9, xmm8
- pshufb xmm8, OWORD PTR L_aes_gcm_bswap_epi64
- paddd xmm9, OWORD PTR L_aes_gcm_one
- pxor xmm8, [r15]
- movdqu [rsp+128], xmm9
- aesenc xmm8, [r15+16]
- aesenc xmm8, [r15+32]
- aesenc xmm8, [r15+48]
- aesenc xmm8, [r15+64]
- aesenc xmm8, [r15+80]
- aesenc xmm8, [r15+96]
- aesenc xmm8, [r15+112]
- aesenc xmm8, [r15+128]
- aesenc xmm8, [r15+144]
- cmp r10d, 11
- movdqa xmm9, OWORD PTR [r15+160]
- jl L_AES_GCM_encrypt_aesni_aesenc_block_aesenc_avx_last
- aesenc xmm8, xmm9
- aesenc xmm8, [r15+176]
- cmp r10d, 13
- movdqa xmm9, OWORD PTR [r15+192]
- jl L_AES_GCM_encrypt_aesni_aesenc_block_aesenc_avx_last
- aesenc xmm8, xmm9
- aesenc xmm8, [r15+208]
- movdqa xmm9, OWORD PTR [r15+224]
- L_AES_GCM_encrypt_aesni_aesenc_block_aesenc_avx_last:
- aesenclast xmm8, xmm9
- movdqu xmm9, [rcx]
- pxor xmm8, xmm9
- movdqu [rdx], xmm8
- pshufb xmm8, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm6, xmm8
- add ebx, 16
- cmp ebx, r13d
- jge L_AES_GCM_encrypt_aesni_last_block_ghash
- L_AES_GCM_encrypt_aesni_last_block_start:
- lea rcx, QWORD PTR [rdi+rbx]
- lea rdx, QWORD PTR [rsi+rbx]
- movdqu xmm8, [rsp+128]
- movdqa xmm9, xmm8
- pshufb xmm8, OWORD PTR L_aes_gcm_bswap_epi64
- paddd xmm9, OWORD PTR L_aes_gcm_one
- pxor xmm8, [r15]
- movdqu [rsp+128], xmm9
- movdqa xmm10, xmm6
- pclmulqdq xmm10, xmm5, 16
- aesenc xmm8, [r15+16]
- aesenc xmm8, [r15+32]
- movdqa xmm11, xmm6
- pclmulqdq xmm11, xmm5, 1
- aesenc xmm8, [r15+48]
- aesenc xmm8, [r15+64]
- movdqa xmm12, xmm6
- pclmulqdq xmm12, xmm5, 0
- aesenc xmm8, [r15+80]
- movdqa xmm1, xmm6
- pclmulqdq xmm1, xmm5, 17
- aesenc xmm8, [r15+96]
- pxor xmm10, xmm11
- movdqa xmm2, xmm10
- psrldq xmm10, 8
- pslldq xmm2, 8
- aesenc xmm8, [r15+112]
- movdqa xmm3, xmm1
- pxor xmm2, xmm12
- pxor xmm3, xmm10
- movdqa xmm0, OWORD PTR L_aes_gcm_mod2_128
- movdqa xmm11, xmm2
- pclmulqdq xmm11, xmm0, 16
- aesenc xmm8, [r15+128]
- pshufd xmm10, xmm2, 78
- pxor xmm10, xmm11
- movdqa xmm11, xmm10
- pclmulqdq xmm11, xmm0, 16
- aesenc xmm8, [r15+144]
- pshufd xmm6, xmm10, 78
- pxor xmm6, xmm11
- pxor xmm6, xmm3
- cmp r10d, 11
- movdqa xmm9, OWORD PTR [r15+160]
- jl L_AES_GCM_encrypt_aesni_aesenc_gfmul_last
- aesenc xmm8, xmm9
- aesenc xmm8, [r15+176]
- cmp r10d, 13
- movdqa xmm9, OWORD PTR [r15+192]
- jl L_AES_GCM_encrypt_aesni_aesenc_gfmul_last
- aesenc xmm8, xmm9
- aesenc xmm8, [r15+208]
- movdqa xmm9, OWORD PTR [r15+224]
- L_AES_GCM_encrypt_aesni_aesenc_gfmul_last:
- aesenclast xmm8, xmm9
- movdqu xmm9, [rcx]
- pxor xmm8, xmm9
- movdqu [rdx], xmm8
- pshufb xmm8, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm6, xmm8
- add ebx, 16
- cmp ebx, r13d
- jl L_AES_GCM_encrypt_aesni_last_block_start
- L_AES_GCM_encrypt_aesni_last_block_ghash:
- pshufd xmm9, xmm5, 78
- pshufd xmm10, xmm6, 78
- movdqa xmm11, xmm6
- movdqa xmm8, xmm6
- pclmulqdq xmm11, xmm5, 17
- pclmulqdq xmm8, xmm5, 0
- pxor xmm9, xmm5
- pxor xmm10, xmm6
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm6, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm6, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm6, xmm14
- L_AES_GCM_encrypt_aesni_last_block_done:
- mov ecx, r9d
- mov edx, ecx
- and ecx, 15
- jz L_AES_GCM_encrypt_aesni_aesenc_last15_enc_avx_done
- movdqu xmm4, [rsp+128]
- pshufb xmm4, OWORD PTR L_aes_gcm_bswap_epi64
- pxor xmm4, [r15]
- aesenc xmm4, [r15+16]
- aesenc xmm4, [r15+32]
- aesenc xmm4, [r15+48]
- aesenc xmm4, [r15+64]
- aesenc xmm4, [r15+80]
- aesenc xmm4, [r15+96]
- aesenc xmm4, [r15+112]
- aesenc xmm4, [r15+128]
- aesenc xmm4, [r15+144]
- cmp r10d, 11
- movdqa xmm9, OWORD PTR [r15+160]
- jl L_AES_GCM_encrypt_aesni_aesenc_last15_enc_avx_aesenc_avx_last
- aesenc xmm4, xmm9
- aesenc xmm4, [r15+176]
- cmp r10d, 13
- movdqa xmm9, OWORD PTR [r15+192]
- jl L_AES_GCM_encrypt_aesni_aesenc_last15_enc_avx_aesenc_avx_last
- aesenc xmm4, xmm9
- aesenc xmm4, [r15+208]
- movdqa xmm9, OWORD PTR [r15+224]
- L_AES_GCM_encrypt_aesni_aesenc_last15_enc_avx_aesenc_avx_last:
- aesenclast xmm4, xmm9
- sub rsp, 16
- xor ecx, ecx
- movdqu [rsp], xmm4
- L_AES_GCM_encrypt_aesni_aesenc_last15_enc_avx_loop:
- movzx r13d, BYTE PTR [rdi+rbx]
- xor r13b, BYTE PTR [rsp+rcx]
- mov BYTE PTR [rsi+rbx], r13b
- mov BYTE PTR [rsp+rcx], r13b
- inc ebx
- inc ecx
- cmp ebx, edx
- jl L_AES_GCM_encrypt_aesni_aesenc_last15_enc_avx_loop
- xor r13, r13
- cmp ecx, 16
- je L_AES_GCM_encrypt_aesni_aesenc_last15_enc_avx_finish_enc
- L_AES_GCM_encrypt_aesni_aesenc_last15_enc_avx_byte_loop:
- mov BYTE PTR [rsp+rcx], r13b
- inc ecx
- cmp ecx, 16
- jl L_AES_GCM_encrypt_aesni_aesenc_last15_enc_avx_byte_loop
- L_AES_GCM_encrypt_aesni_aesenc_last15_enc_avx_finish_enc:
- movdqu xmm4, [rsp]
- add rsp, 16
- pshufb xmm4, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm6, xmm4
- pshufd xmm9, xmm5, 78
- pshufd xmm10, xmm6, 78
- movdqa xmm11, xmm6
- movdqa xmm8, xmm6
- pclmulqdq xmm11, xmm5, 17
- pclmulqdq xmm8, xmm5, 0
- pxor xmm9, xmm5
- pxor xmm10, xmm6
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm6, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm6, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm6, xmm14
- L_AES_GCM_encrypt_aesni_aesenc_last15_enc_avx_done:
- L_AES_GCM_encrypt_aesni_done_enc:
- mov edx, r9d
- mov ecx, r11d
- shl rdx, 3
- shl rcx, 3
- pinsrq xmm0, rdx, 0
- pinsrq xmm0, rcx, 1
- pxor xmm6, xmm0
- pshufd xmm9, xmm5, 78
- pshufd xmm10, xmm6, 78
- movdqa xmm11, xmm6
- movdqa xmm8, xmm6
- pclmulqdq xmm11, xmm5, 17
- pclmulqdq xmm8, xmm5, 0
- pxor xmm9, xmm5
- pxor xmm10, xmm6
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm6, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm6, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm6, xmm14
- pshufb xmm6, OWORD PTR L_aes_gcm_bswap_mask
- movdqu xmm0, [rsp+144]
- pxor xmm0, xmm6
- cmp r14d, 16
- je L_AES_GCM_encrypt_aesni_store_tag_16
- xor rcx, rcx
- movdqu [rsp], xmm0
- L_AES_GCM_encrypt_aesni_store_tag_loop:
- movzx r13d, BYTE PTR [rsp+rcx]
- mov BYTE PTR [r8+rcx], r13b
- inc ecx
- cmp ecx, r14d
- jne L_AES_GCM_encrypt_aesni_store_tag_loop
- jmp L_AES_GCM_encrypt_aesni_store_tag_done
- L_AES_GCM_encrypt_aesni_store_tag_16:
- movdqu [r8], xmm0
- L_AES_GCM_encrypt_aesni_store_tag_done:
- movdqu xmm6, [rsp+160]
- movdqu xmm7, [rsp+176]
- movdqu xmm8, [rsp+192]
- movdqu xmm9, [rsp+208]
- movdqu xmm10, [rsp+224]
- movdqu xmm11, [rsp+240]
- movdqu xmm12, [rsp+256]
- movdqu xmm13, [rsp+272]
- movdqu xmm14, [rsp+288]
- movdqu xmm15, [rsp+304]
- add rsp, 320
- pop r15
- pop r14
- pop rbx
- pop r12
- pop rsi
- pop rdi
- pop r13
- ret
- AES_GCM_encrypt_aesni ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_decrypt_aesni PROC
- push r13
- push rdi
- push rsi
- push r12
- push rbx
- push r14
- push r15
- push rbp
- mov rdi, rcx
- mov rsi, rdx
- mov r12, r8
- mov rax, r9
- mov r8, QWORD PTR [rsp+104]
- mov r9d, DWORD PTR [rsp+112]
- mov r11d, DWORD PTR [rsp+120]
- mov ebx, DWORD PTR [rsp+128]
- mov r14d, DWORD PTR [rsp+136]
- mov r15, QWORD PTR [rsp+144]
- mov r10d, DWORD PTR [rsp+152]
- mov rbp, QWORD PTR [rsp+160]
- sub rsp, 328
- movdqu [rsp+168], xmm6
- movdqu [rsp+184], xmm7
- movdqu [rsp+200], xmm8
- movdqu [rsp+216], xmm9
- movdqu [rsp+232], xmm10
- movdqu [rsp+248], xmm11
- movdqu [rsp+264], xmm12
- movdqu [rsp+280], xmm13
- movdqu [rsp+296], xmm14
- movdqu [rsp+312], xmm15
- pxor xmm4, xmm4
- pxor xmm6, xmm6
- cmp ebx, 12
- mov edx, ebx
- jne L_AES_GCM_decrypt_aesni_iv_not_12
- ; # Calculate values when IV is 12 bytes
- ; Set counter based on IV
- mov ecx, 16777216
- pinsrq xmm4, QWORD PTR [rax], 0
- pinsrd xmm4, DWORD PTR [rax+8], 2
- pinsrd xmm4, ecx, 3
- ; H = Encrypt X(=0) and T = Encrypt counter
- movdqa xmm1, xmm4
- movdqa xmm5, OWORD PTR [r15]
- pxor xmm1, xmm5
- movdqa xmm7, OWORD PTR [r15+16]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+32]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+48]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+64]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+80]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+96]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+112]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+128]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+144]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- cmp r10d, 11
- movdqa xmm7, OWORD PTR [r15+160]
- jl L_AES_GCM_decrypt_aesni_calc_iv_12_last
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+176]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- cmp r10d, 13
- movdqa xmm7, OWORD PTR [r15+192]
- jl L_AES_GCM_decrypt_aesni_calc_iv_12_last
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+208]
- aesenc xmm5, xmm7
- aesenc xmm1, xmm7
- movdqa xmm7, OWORD PTR [r15+224]
- L_AES_GCM_decrypt_aesni_calc_iv_12_last:
- aesenclast xmm5, xmm7
- aesenclast xmm1, xmm7
- pshufb xmm5, OWORD PTR L_aes_gcm_bswap_mask
- movdqu [rsp+144], xmm1
- jmp L_AES_GCM_decrypt_aesni_iv_done
- L_AES_GCM_decrypt_aesni_iv_not_12:
- ; Calculate values when IV is not 12 bytes
- ; H = Encrypt X(=0)
- movdqa xmm5, OWORD PTR [r15]
- aesenc xmm5, [r15+16]
- aesenc xmm5, [r15+32]
- aesenc xmm5, [r15+48]
- aesenc xmm5, [r15+64]
- aesenc xmm5, [r15+80]
- aesenc xmm5, [r15+96]
- aesenc xmm5, [r15+112]
- aesenc xmm5, [r15+128]
- aesenc xmm5, [r15+144]
- cmp r10d, 11
- movdqa xmm9, OWORD PTR [r15+160]
- jl L_AES_GCM_decrypt_aesni_calc_iv_1_aesenc_avx_last
- aesenc xmm5, xmm9
- aesenc xmm5, [r15+176]
- cmp r10d, 13
- movdqa xmm9, OWORD PTR [r15+192]
- jl L_AES_GCM_decrypt_aesni_calc_iv_1_aesenc_avx_last
- aesenc xmm5, xmm9
- aesenc xmm5, [r15+208]
- movdqa xmm9, OWORD PTR [r15+224]
- L_AES_GCM_decrypt_aesni_calc_iv_1_aesenc_avx_last:
- aesenclast xmm5, xmm9
- pshufb xmm5, OWORD PTR L_aes_gcm_bswap_mask
- ; Calc counter
- ; Initialization vector
- cmp edx, 0
- mov rcx, 0
- je L_AES_GCM_decrypt_aesni_calc_iv_done
- cmp edx, 16
- jl L_AES_GCM_decrypt_aesni_calc_iv_lt16
- and edx, 4294967280
- L_AES_GCM_decrypt_aesni_calc_iv_16_loop:
- movdqu xmm8, [rax+rcx]
- pshufb xmm8, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm4, xmm8
- pshufd xmm1, xmm4, 78
- pshufd xmm2, xmm5, 78
- movdqa xmm3, xmm5
- movdqa xmm0, xmm5
- pclmulqdq xmm3, xmm4, 17
- pclmulqdq xmm0, xmm4, 0
- pxor xmm1, xmm4
- pxor xmm2, xmm5
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- movdqa xmm7, xmm0
- movdqa xmm4, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm7, xmm2
- pxor xmm4, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm4
- psrld xmm0, 31
- psrld xmm1, 31
- pslld xmm7, 1
- pslld xmm4, 1
- movdqa xmm2, xmm0
- pslldq xmm0, 4
- psrldq xmm2, 12
- pslldq xmm1, 4
- por xmm4, xmm2
- por xmm7, xmm0
- por xmm4, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm7
- movdqa xmm2, xmm7
- pslld xmm0, 31
- pslld xmm1, 30
- pslld xmm2, 25
- pxor xmm0, xmm1
- pxor xmm0, xmm2
- movdqa xmm1, xmm0
- psrldq xmm1, 4
- pslldq xmm0, 12
- pxor xmm7, xmm0
- movdqa xmm2, xmm7
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- psrld xmm2, 1
- psrld xmm3, 2
- psrld xmm0, 7
- pxor xmm2, xmm3
- pxor xmm2, xmm0
- pxor xmm2, xmm1
- pxor xmm2, xmm7
- pxor xmm4, xmm2
- add ecx, 16
- cmp ecx, edx
- jl L_AES_GCM_decrypt_aesni_calc_iv_16_loop
- mov edx, ebx
- cmp ecx, edx
- je L_AES_GCM_decrypt_aesni_calc_iv_done
- L_AES_GCM_decrypt_aesni_calc_iv_lt16:
- sub rsp, 16
- pxor xmm8, xmm8
- xor ebx, ebx
- movdqu [rsp], xmm8
- L_AES_GCM_decrypt_aesni_calc_iv_loop:
- movzx r13d, BYTE PTR [rax+rcx]
- mov BYTE PTR [rsp+rbx], r13b
- inc ecx
- inc ebx
- cmp ecx, edx
- jl L_AES_GCM_decrypt_aesni_calc_iv_loop
- movdqu xmm8, [rsp]
- add rsp, 16
- pshufb xmm8, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm4, xmm8
- pshufd xmm1, xmm4, 78
- pshufd xmm2, xmm5, 78
- movdqa xmm3, xmm5
- movdqa xmm0, xmm5
- pclmulqdq xmm3, xmm4, 17
- pclmulqdq xmm0, xmm4, 0
- pxor xmm1, xmm4
- pxor xmm2, xmm5
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- movdqa xmm7, xmm0
- movdqa xmm4, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm7, xmm2
- pxor xmm4, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm4
- psrld xmm0, 31
- psrld xmm1, 31
- pslld xmm7, 1
- pslld xmm4, 1
- movdqa xmm2, xmm0
- pslldq xmm0, 4
- psrldq xmm2, 12
- pslldq xmm1, 4
- por xmm4, xmm2
- por xmm7, xmm0
- por xmm4, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm7
- movdqa xmm2, xmm7
- pslld xmm0, 31
- pslld xmm1, 30
- pslld xmm2, 25
- pxor xmm0, xmm1
- pxor xmm0, xmm2
- movdqa xmm1, xmm0
- psrldq xmm1, 4
- pslldq xmm0, 12
- pxor xmm7, xmm0
- movdqa xmm2, xmm7
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- psrld xmm2, 1
- psrld xmm3, 2
- psrld xmm0, 7
- pxor xmm2, xmm3
- pxor xmm2, xmm0
- pxor xmm2, xmm1
- pxor xmm2, xmm7
- pxor xmm4, xmm2
- L_AES_GCM_decrypt_aesni_calc_iv_done:
- ; T = Encrypt counter
- pxor xmm0, xmm0
- shl edx, 3
- pinsrq xmm0, rdx, 0
- pxor xmm4, xmm0
- pshufd xmm1, xmm4, 78
- pshufd xmm2, xmm5, 78
- movdqa xmm3, xmm5
- movdqa xmm0, xmm5
- pclmulqdq xmm3, xmm4, 17
- pclmulqdq xmm0, xmm4, 0
- pxor xmm1, xmm4
- pxor xmm2, xmm5
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- movdqa xmm7, xmm0
- movdqa xmm4, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm7, xmm2
- pxor xmm4, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm4
- psrld xmm0, 31
- psrld xmm1, 31
- pslld xmm7, 1
- pslld xmm4, 1
- movdqa xmm2, xmm0
- pslldq xmm0, 4
- psrldq xmm2, 12
- pslldq xmm1, 4
- por xmm4, xmm2
- por xmm7, xmm0
- por xmm4, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm7
- movdqa xmm2, xmm7
- pslld xmm0, 31
- pslld xmm1, 30
- pslld xmm2, 25
- pxor xmm0, xmm1
- pxor xmm0, xmm2
- movdqa xmm1, xmm0
- psrldq xmm1, 4
- pslldq xmm0, 12
- pxor xmm7, xmm0
- movdqa xmm2, xmm7
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- psrld xmm2, 1
- psrld xmm3, 2
- psrld xmm0, 7
- pxor xmm2, xmm3
- pxor xmm2, xmm0
- pxor xmm2, xmm1
- pxor xmm2, xmm7
- pxor xmm4, xmm2
- pshufb xmm4, OWORD PTR L_aes_gcm_bswap_mask
- ; Encrypt counter
- movdqa xmm8, OWORD PTR [r15]
- pxor xmm8, xmm4
- aesenc xmm8, [r15+16]
- aesenc xmm8, [r15+32]
- aesenc xmm8, [r15+48]
- aesenc xmm8, [r15+64]
- aesenc xmm8, [r15+80]
- aesenc xmm8, [r15+96]
- aesenc xmm8, [r15+112]
- aesenc xmm8, [r15+128]
- aesenc xmm8, [r15+144]
- cmp r10d, 11
- movdqa xmm9, OWORD PTR [r15+160]
- jl L_AES_GCM_decrypt_aesni_calc_iv_2_aesenc_avx_last
- aesenc xmm8, xmm9
- aesenc xmm8, [r15+176]
- cmp r10d, 13
- movdqa xmm9, OWORD PTR [r15+192]
- jl L_AES_GCM_decrypt_aesni_calc_iv_2_aesenc_avx_last
- aesenc xmm8, xmm9
- aesenc xmm8, [r15+208]
- movdqa xmm9, OWORD PTR [r15+224]
- L_AES_GCM_decrypt_aesni_calc_iv_2_aesenc_avx_last:
- aesenclast xmm8, xmm9
- movdqu [rsp+144], xmm8
- L_AES_GCM_decrypt_aesni_iv_done:
- ; Additional authentication data
- mov edx, r11d
- cmp edx, 0
- je L_AES_GCM_decrypt_aesni_calc_aad_done
- xor ecx, ecx
- cmp edx, 16
- jl L_AES_GCM_decrypt_aesni_calc_aad_lt16
- and edx, 4294967280
- L_AES_GCM_decrypt_aesni_calc_aad_16_loop:
- movdqu xmm8, [r12+rcx]
- pshufb xmm8, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm6, xmm8
- pshufd xmm1, xmm6, 78
- pshufd xmm2, xmm5, 78
- movdqa xmm3, xmm5
- movdqa xmm0, xmm5
- pclmulqdq xmm3, xmm6, 17
- pclmulqdq xmm0, xmm6, 0
- pxor xmm1, xmm6
- pxor xmm2, xmm5
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- movdqa xmm7, xmm0
- movdqa xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm7, xmm2
- pxor xmm6, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm6
- psrld xmm0, 31
- psrld xmm1, 31
- pslld xmm7, 1
- pslld xmm6, 1
- movdqa xmm2, xmm0
- pslldq xmm0, 4
- psrldq xmm2, 12
- pslldq xmm1, 4
- por xmm6, xmm2
- por xmm7, xmm0
- por xmm6, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm7
- movdqa xmm2, xmm7
- pslld xmm0, 31
- pslld xmm1, 30
- pslld xmm2, 25
- pxor xmm0, xmm1
- pxor xmm0, xmm2
- movdqa xmm1, xmm0
- psrldq xmm1, 4
- pslldq xmm0, 12
- pxor xmm7, xmm0
- movdqa xmm2, xmm7
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- psrld xmm2, 1
- psrld xmm3, 2
- psrld xmm0, 7
- pxor xmm2, xmm3
- pxor xmm2, xmm0
- pxor xmm2, xmm1
- pxor xmm2, xmm7
- pxor xmm6, xmm2
- add ecx, 16
- cmp ecx, edx
- jl L_AES_GCM_decrypt_aesni_calc_aad_16_loop
- mov edx, r11d
- cmp ecx, edx
- je L_AES_GCM_decrypt_aesni_calc_aad_done
- L_AES_GCM_decrypt_aesni_calc_aad_lt16:
- sub rsp, 16
- pxor xmm8, xmm8
- xor ebx, ebx
- movdqu [rsp], xmm8
- L_AES_GCM_decrypt_aesni_calc_aad_loop:
- movzx r13d, BYTE PTR [r12+rcx]
- mov BYTE PTR [rsp+rbx], r13b
- inc ecx
- inc ebx
- cmp ecx, edx
- jl L_AES_GCM_decrypt_aesni_calc_aad_loop
- movdqu xmm8, [rsp]
- add rsp, 16
- pshufb xmm8, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm6, xmm8
- pshufd xmm1, xmm6, 78
- pshufd xmm2, xmm5, 78
- movdqa xmm3, xmm5
- movdqa xmm0, xmm5
- pclmulqdq xmm3, xmm6, 17
- pclmulqdq xmm0, xmm6, 0
- pxor xmm1, xmm6
- pxor xmm2, xmm5
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- movdqa xmm7, xmm0
- movdqa xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm7, xmm2
- pxor xmm6, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm6
- psrld xmm0, 31
- psrld xmm1, 31
- pslld xmm7, 1
- pslld xmm6, 1
- movdqa xmm2, xmm0
- pslldq xmm0, 4
- psrldq xmm2, 12
- pslldq xmm1, 4
- por xmm6, xmm2
- por xmm7, xmm0
- por xmm6, xmm1
- movdqa xmm0, xmm7
- movdqa xmm1, xmm7
- movdqa xmm2, xmm7
- pslld xmm0, 31
- pslld xmm1, 30
- pslld xmm2, 25
- pxor xmm0, xmm1
- pxor xmm0, xmm2
- movdqa xmm1, xmm0
- psrldq xmm1, 4
- pslldq xmm0, 12
- pxor xmm7, xmm0
- movdqa xmm2, xmm7
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- psrld xmm2, 1
- psrld xmm3, 2
- psrld xmm0, 7
- pxor xmm2, xmm3
- pxor xmm2, xmm0
- pxor xmm2, xmm1
- pxor xmm2, xmm7
- pxor xmm6, xmm2
- L_AES_GCM_decrypt_aesni_calc_aad_done:
- ; Calculate counter and H
- pshufb xmm4, OWORD PTR L_aes_gcm_bswap_epi64
- movdqa xmm9, xmm5
- paddd xmm4, OWORD PTR L_aes_gcm_one
- movdqa xmm8, xmm5
- movdqu [rsp+128], xmm4
- psrlq xmm9, 63
- psllq xmm8, 1
- pslldq xmm9, 8
- por xmm8, xmm9
- pshufd xmm5, xmm5, 255
- psrad xmm5, 31
- pand xmm5, OWORD PTR L_aes_gcm_mod2_128
- pxor xmm5, xmm8
- xor ebx, ebx
- cmp r9d, 128
- mov r13d, r9d
- jl L_AES_GCM_decrypt_aesni_done_128
- and r13d, 4294967168
- movdqa xmm2, xmm6
- ; H ^ 1
- movdqu [rsp], xmm5
- ; H ^ 2
- pshufd xmm9, xmm5, 78
- pshufd xmm10, xmm5, 78
- movdqa xmm11, xmm5
- movdqa xmm8, xmm5
- pclmulqdq xmm11, xmm5, 17
- pclmulqdq xmm8, xmm5, 0
- pxor xmm9, xmm5
- pxor xmm10, xmm5
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm0, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm0, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm0, xmm14
- movdqu [rsp+16], xmm0
- ; H ^ 3
- pshufd xmm9, xmm5, 78
- pshufd xmm10, xmm0, 78
- movdqa xmm11, xmm0
- movdqa xmm8, xmm0
- pclmulqdq xmm11, xmm5, 17
- pclmulqdq xmm8, xmm5, 0
- pxor xmm9, xmm5
- pxor xmm10, xmm0
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm1, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm1, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm1, xmm14
- movdqu [rsp+32], xmm1
- ; H ^ 4
- pshufd xmm9, xmm0, 78
- pshufd xmm10, xmm0, 78
- movdqa xmm11, xmm0
- movdqa xmm8, xmm0
- pclmulqdq xmm11, xmm0, 17
- pclmulqdq xmm8, xmm0, 0
- pxor xmm9, xmm0
- pxor xmm10, xmm0
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm3, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm3, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm3, xmm14
- movdqu [rsp+48], xmm3
- ; H ^ 5
- pshufd xmm9, xmm0, 78
- pshufd xmm10, xmm1, 78
- movdqa xmm11, xmm1
- movdqa xmm8, xmm1
- pclmulqdq xmm11, xmm0, 17
- pclmulqdq xmm8, xmm0, 0
- pxor xmm9, xmm0
- pxor xmm10, xmm1
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm7, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm7, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm7, xmm14
- movdqu [rsp+64], xmm7
- ; H ^ 6
- pshufd xmm9, xmm1, 78
- pshufd xmm10, xmm1, 78
- movdqa xmm11, xmm1
- movdqa xmm8, xmm1
- pclmulqdq xmm11, xmm1, 17
- pclmulqdq xmm8, xmm1, 0
- pxor xmm9, xmm1
- pxor xmm10, xmm1
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm7, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm7, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm7, xmm14
- movdqu [rsp+80], xmm7
- ; H ^ 7
- pshufd xmm9, xmm1, 78
- pshufd xmm10, xmm3, 78
- movdqa xmm11, xmm3
- movdqa xmm8, xmm3
- pclmulqdq xmm11, xmm1, 17
- pclmulqdq xmm8, xmm1, 0
- pxor xmm9, xmm1
- pxor xmm10, xmm3
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm7, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm7, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm7, xmm14
- movdqu [rsp+96], xmm7
- ; H ^ 8
- pshufd xmm9, xmm3, 78
- pshufd xmm10, xmm3, 78
- movdqa xmm11, xmm3
- movdqa xmm8, xmm3
- pclmulqdq xmm11, xmm3, 17
- pclmulqdq xmm8, xmm3, 0
- pxor xmm9, xmm3
- pxor xmm10, xmm3
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm7, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm7, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm7, xmm14
- movdqu [rsp+112], xmm7
- L_AES_GCM_decrypt_aesni_ghash_128:
- lea rcx, QWORD PTR [rdi+rbx]
- lea rdx, QWORD PTR [rsi+rbx]
- movdqu xmm8, [rsp+128]
- movdqa xmm1, OWORD PTR L_aes_gcm_bswap_epi64
- movdqa xmm0, xmm8
- pshufb xmm8, xmm1
- movdqa xmm9, xmm0
- paddd xmm9, OWORD PTR L_aes_gcm_one
- pshufb xmm9, xmm1
- movdqa xmm10, xmm0
- paddd xmm10, OWORD PTR L_aes_gcm_two
- pshufb xmm10, xmm1
- movdqa xmm11, xmm0
- paddd xmm11, OWORD PTR L_aes_gcm_three
- pshufb xmm11, xmm1
- movdqa xmm12, xmm0
- paddd xmm12, OWORD PTR L_aes_gcm_four
- pshufb xmm12, xmm1
- movdqa xmm13, xmm0
- paddd xmm13, OWORD PTR L_aes_gcm_five
- pshufb xmm13, xmm1
- movdqa xmm14, xmm0
- paddd xmm14, OWORD PTR L_aes_gcm_six
- pshufb xmm14, xmm1
- movdqa xmm15, xmm0
- paddd xmm15, OWORD PTR L_aes_gcm_seven
- pshufb xmm15, xmm1
- paddd xmm0, OWORD PTR L_aes_gcm_eight
- movdqa xmm7, OWORD PTR [r15]
- movdqu [rsp+128], xmm0
- pxor xmm8, xmm7
- pxor xmm9, xmm7
- pxor xmm10, xmm7
- pxor xmm11, xmm7
- pxor xmm12, xmm7
- pxor xmm13, xmm7
- pxor xmm14, xmm7
- pxor xmm15, xmm7
- movdqu xmm7, [rsp+112]
- movdqu xmm0, [rcx]
- aesenc xmm8, [r15+16]
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm0, xmm2
- pshufd xmm1, xmm7, 78
- pshufd xmm5, xmm0, 78
- pxor xmm1, xmm7
- pxor xmm5, xmm0
- movdqa xmm3, xmm0
- pclmulqdq xmm3, xmm7, 17
- aesenc xmm9, [r15+16]
- aesenc xmm10, [r15+16]
- movdqa xmm2, xmm0
- pclmulqdq xmm2, xmm7, 0
- aesenc xmm11, [r15+16]
- aesenc xmm12, [r15+16]
- pclmulqdq xmm1, xmm5, 0
- aesenc xmm13, [r15+16]
- aesenc xmm14, [r15+16]
- aesenc xmm15, [r15+16]
- pxor xmm1, xmm2
- pxor xmm1, xmm3
- movdqu xmm7, [rsp+96]
- movdqu xmm0, [rcx+16]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [r15+32]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [r15+32]
- aesenc xmm10, [r15+32]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [r15+32]
- aesenc xmm12, [r15+32]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [r15+32]
- aesenc xmm14, [r15+32]
- aesenc xmm15, [r15+32]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+80]
- movdqu xmm0, [rcx+32]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [r15+48]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [r15+48]
- aesenc xmm10, [r15+48]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [r15+48]
- aesenc xmm12, [r15+48]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [r15+48]
- aesenc xmm14, [r15+48]
- aesenc xmm15, [r15+48]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+64]
- movdqu xmm0, [rcx+48]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [r15+64]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [r15+64]
- aesenc xmm10, [r15+64]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [r15+64]
- aesenc xmm12, [r15+64]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [r15+64]
- aesenc xmm14, [r15+64]
- aesenc xmm15, [r15+64]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+48]
- movdqu xmm0, [rcx+64]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [r15+80]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [r15+80]
- aesenc xmm10, [r15+80]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [r15+80]
- aesenc xmm12, [r15+80]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [r15+80]
- aesenc xmm14, [r15+80]
- aesenc xmm15, [r15+80]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+32]
- movdqu xmm0, [rcx+80]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [r15+96]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [r15+96]
- aesenc xmm10, [r15+96]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [r15+96]
- aesenc xmm12, [r15+96]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [r15+96]
- aesenc xmm14, [r15+96]
- aesenc xmm15, [r15+96]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+16]
- movdqu xmm0, [rcx+96]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [r15+112]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [r15+112]
- aesenc xmm10, [r15+112]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [r15+112]
- aesenc xmm12, [r15+112]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [r15+112]
- aesenc xmm14, [r15+112]
- aesenc xmm15, [r15+112]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp]
- movdqu xmm0, [rcx+112]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [r15+128]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [r15+128]
- aesenc xmm10, [r15+128]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [r15+128]
- aesenc xmm12, [r15+128]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [r15+128]
- aesenc xmm14, [r15+128]
- aesenc xmm15, [r15+128]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqa xmm5, xmm1
- psrldq xmm1, 8
- pslldq xmm5, 8
- aesenc xmm8, [r15+144]
- pxor xmm2, xmm5
- pxor xmm3, xmm1
- movdqa xmm7, xmm2
- movdqa xmm4, xmm2
- movdqa xmm5, xmm2
- aesenc xmm9, [r15+144]
- pslld xmm7, 31
- pslld xmm4, 30
- pslld xmm5, 25
- aesenc xmm10, [r15+144]
- pxor xmm7, xmm4
- pxor xmm7, xmm5
- aesenc xmm11, [r15+144]
- movdqa xmm4, xmm7
- pslldq xmm7, 12
- psrldq xmm4, 4
- aesenc xmm12, [r15+144]
- pxor xmm2, xmm7
- movdqa xmm5, xmm2
- movdqa xmm1, xmm2
- movdqa xmm0, xmm2
- aesenc xmm13, [r15+144]
- psrld xmm5, 1
- psrld xmm1, 2
- psrld xmm0, 7
- aesenc xmm14, [r15+144]
- pxor xmm5, xmm1
- pxor xmm5, xmm0
- aesenc xmm15, [r15+144]
- pxor xmm5, xmm4
- pxor xmm2, xmm5
- pxor xmm2, xmm3
- cmp r10d, 11
- movdqa xmm7, OWORD PTR [r15+160]
- jl L_AES_GCM_decrypt_aesni_aesenc_128_ghash_avx_done
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [r15+176]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- cmp r10d, 13
- movdqa xmm7, OWORD PTR [r15+192]
- jl L_AES_GCM_decrypt_aesni_aesenc_128_ghash_avx_done
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [r15+208]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [r15+224]
- L_AES_GCM_decrypt_aesni_aesenc_128_ghash_avx_done:
- aesenclast xmm8, xmm7
- aesenclast xmm9, xmm7
- movdqu xmm0, [rcx]
- movdqu xmm1, [rcx+16]
- pxor xmm8, xmm0
- pxor xmm9, xmm1
- movdqu [rdx], xmm8
- movdqu [rdx+16], xmm9
- aesenclast xmm10, xmm7
- aesenclast xmm11, xmm7
- movdqu xmm0, [rcx+32]
- movdqu xmm1, [rcx+48]
- pxor xmm10, xmm0
- pxor xmm11, xmm1
- movdqu [rdx+32], xmm10
- movdqu [rdx+48], xmm11
- aesenclast xmm12, xmm7
- aesenclast xmm13, xmm7
- movdqu xmm0, [rcx+64]
- movdqu xmm1, [rcx+80]
- pxor xmm12, xmm0
- pxor xmm13, xmm1
- movdqu [rdx+64], xmm12
- movdqu [rdx+80], xmm13
- aesenclast xmm14, xmm7
- aesenclast xmm15, xmm7
- movdqu xmm0, [rcx+96]
- movdqu xmm1, [rcx+112]
- pxor xmm14, xmm0
- pxor xmm15, xmm1
- movdqu [rdx+96], xmm14
- movdqu [rdx+112], xmm15
- add ebx, 128
- cmp ebx, r13d
- jl L_AES_GCM_decrypt_aesni_ghash_128
- movdqa xmm6, xmm2
- movdqu xmm5, [rsp]
- L_AES_GCM_decrypt_aesni_done_128:
- mov edx, r9d
- cmp ebx, edx
- jge L_AES_GCM_decrypt_aesni_done_dec
- mov r13d, r9d
- and r13d, 4294967280
- cmp ebx, r13d
- jge L_AES_GCM_decrypt_aesni_last_block_done
- L_AES_GCM_decrypt_aesni_last_block_start:
- lea rcx, QWORD PTR [rdi+rbx]
- lea rdx, QWORD PTR [rsi+rbx]
- movdqu xmm1, [rcx]
- movdqa xmm0, xmm5
- pshufb xmm1, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm1, xmm6
- movdqu xmm8, [rsp+128]
- movdqa xmm9, xmm8
- pshufb xmm8, OWORD PTR L_aes_gcm_bswap_epi64
- paddd xmm9, OWORD PTR L_aes_gcm_one
- pxor xmm8, [r15]
- movdqu [rsp+128], xmm9
- movdqa xmm10, xmm1
- pclmulqdq xmm10, xmm0, 16
- aesenc xmm8, [r15+16]
- aesenc xmm8, [r15+32]
- movdqa xmm11, xmm1
- pclmulqdq xmm11, xmm0, 1
- aesenc xmm8, [r15+48]
- aesenc xmm8, [r15+64]
- movdqa xmm12, xmm1
- pclmulqdq xmm12, xmm0, 0
- aesenc xmm8, [r15+80]
- movdqa xmm1, xmm1
- pclmulqdq xmm1, xmm0, 17
- aesenc xmm8, [r15+96]
- pxor xmm10, xmm11
- movdqa xmm2, xmm10
- psrldq xmm10, 8
- pslldq xmm2, 8
- aesenc xmm8, [r15+112]
- movdqa xmm3, xmm1
- pxor xmm2, xmm12
- pxor xmm3, xmm10
- movdqa xmm0, OWORD PTR L_aes_gcm_mod2_128
- movdqa xmm11, xmm2
- pclmulqdq xmm11, xmm0, 16
- aesenc xmm8, [r15+128]
- pshufd xmm10, xmm2, 78
- pxor xmm10, xmm11
- movdqa xmm11, xmm10
- pclmulqdq xmm11, xmm0, 16
- aesenc xmm8, [r15+144]
- pshufd xmm6, xmm10, 78
- pxor xmm6, xmm11
- pxor xmm6, xmm3
- cmp r10d, 11
- movdqa xmm9, OWORD PTR [r15+160]
- jl L_AES_GCM_decrypt_aesni_aesenc_gfmul_last
- aesenc xmm8, xmm9
- aesenc xmm8, [r15+176]
- cmp r10d, 13
- movdqa xmm9, OWORD PTR [r15+192]
- jl L_AES_GCM_decrypt_aesni_aesenc_gfmul_last
- aesenc xmm8, xmm9
- aesenc xmm8, [r15+208]
- movdqa xmm9, OWORD PTR [r15+224]
- L_AES_GCM_decrypt_aesni_aesenc_gfmul_last:
- aesenclast xmm8, xmm9
- movdqu xmm9, [rcx]
- pxor xmm8, xmm9
- movdqu [rdx], xmm8
- add ebx, 16
- cmp ebx, r13d
- jl L_AES_GCM_decrypt_aesni_last_block_start
- L_AES_GCM_decrypt_aesni_last_block_done:
- mov ecx, r9d
- mov edx, ecx
- and ecx, 15
- jz L_AES_GCM_decrypt_aesni_aesenc_last15_dec_avx_done
- movdqu xmm4, [rsp+128]
- pshufb xmm4, OWORD PTR L_aes_gcm_bswap_epi64
- pxor xmm4, [r15]
- aesenc xmm4, [r15+16]
- aesenc xmm4, [r15+32]
- aesenc xmm4, [r15+48]
- aesenc xmm4, [r15+64]
- aesenc xmm4, [r15+80]
- aesenc xmm4, [r15+96]
- aesenc xmm4, [r15+112]
- aesenc xmm4, [r15+128]
- aesenc xmm4, [r15+144]
- cmp r10d, 11
- movdqa xmm9, OWORD PTR [r15+160]
- jl L_AES_GCM_decrypt_aesni_aesenc_last15_dec_avx_aesenc_avx_last
- aesenc xmm4, xmm9
- aesenc xmm4, [r15+176]
- cmp r10d, 13
- movdqa xmm9, OWORD PTR [r15+192]
- jl L_AES_GCM_decrypt_aesni_aesenc_last15_dec_avx_aesenc_avx_last
- aesenc xmm4, xmm9
- aesenc xmm4, [r15+208]
- movdqa xmm9, OWORD PTR [r15+224]
- L_AES_GCM_decrypt_aesni_aesenc_last15_dec_avx_aesenc_avx_last:
- aesenclast xmm4, xmm9
- sub rsp, 32
- xor ecx, ecx
- movdqu [rsp], xmm4
- pxor xmm0, xmm0
- movdqu [rsp+16], xmm0
- L_AES_GCM_decrypt_aesni_aesenc_last15_dec_avx_loop:
- movzx r13d, BYTE PTR [rdi+rbx]
- mov BYTE PTR [rsp+rcx+16], r13b
- xor r13b, BYTE PTR [rsp+rcx]
- mov BYTE PTR [rsi+rbx], r13b
- inc ebx
- inc ecx
- cmp ebx, edx
- jl L_AES_GCM_decrypt_aesni_aesenc_last15_dec_avx_loop
- movdqu xmm4, [rsp+16]
- add rsp, 32
- pshufb xmm4, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm6, xmm4
- pshufd xmm9, xmm5, 78
- pshufd xmm10, xmm6, 78
- movdqa xmm11, xmm6
- movdqa xmm8, xmm6
- pclmulqdq xmm11, xmm5, 17
- pclmulqdq xmm8, xmm5, 0
- pxor xmm9, xmm5
- pxor xmm10, xmm6
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm6, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm6, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm6, xmm14
- L_AES_GCM_decrypt_aesni_aesenc_last15_dec_avx_done:
- L_AES_GCM_decrypt_aesni_done_dec:
- mov edx, r9d
- mov ecx, r11d
- shl rdx, 3
- shl rcx, 3
- pinsrq xmm0, rdx, 0
- pinsrq xmm0, rcx, 1
- pxor xmm6, xmm0
- pshufd xmm9, xmm5, 78
- pshufd xmm10, xmm6, 78
- movdqa xmm11, xmm6
- movdqa xmm8, xmm6
- pclmulqdq xmm11, xmm5, 17
- pclmulqdq xmm8, xmm5, 0
- pxor xmm9, xmm5
- pxor xmm10, xmm6
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm6, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm6, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm6, xmm14
- pshufb xmm6, OWORD PTR L_aes_gcm_bswap_mask
- movdqu xmm0, [rsp+144]
- pxor xmm0, xmm6
- cmp r14d, 16
- je L_AES_GCM_decrypt_aesni_cmp_tag_16
- sub rsp, 16
- xor rcx, rcx
- xor rbx, rbx
- movdqu [rsp], xmm0
- L_AES_GCM_decrypt_aesni_cmp_tag_loop:
- movzx r13d, BYTE PTR [rsp+rcx]
- xor r13b, BYTE PTR [r8+rcx]
- or bl, r13b
- inc ecx
- cmp ecx, r14d
- jne L_AES_GCM_decrypt_aesni_cmp_tag_loop
- cmp rbx, 0
- sete bl
- add rsp, 16
- xor rcx, rcx
- jmp L_AES_GCM_decrypt_aesni_cmp_tag_done
- L_AES_GCM_decrypt_aesni_cmp_tag_16:
- movdqu xmm1, [r8]
- pcmpeqb xmm0, xmm1
- pmovmskb rdx, xmm0
- ; %%edx == 0xFFFF then return 1 else => return 0
- xor ebx, ebx
- cmp edx, 65535
- sete bl
- L_AES_GCM_decrypt_aesni_cmp_tag_done:
- mov DWORD PTR [rbp], ebx
- movdqu xmm6, [rsp+168]
- movdqu xmm7, [rsp+184]
- movdqu xmm8, [rsp+200]
- movdqu xmm9, [rsp+216]
- movdqu xmm10, [rsp+232]
- movdqu xmm11, [rsp+248]
- movdqu xmm12, [rsp+264]
- movdqu xmm13, [rsp+280]
- movdqu xmm14, [rsp+296]
- movdqu xmm15, [rsp+312]
- add rsp, 328
- pop rbp
- pop r15
- pop r14
- pop rbx
- pop r12
- pop rsi
- pop rdi
- pop r13
- ret
- AES_GCM_decrypt_aesni ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_init_aesni PROC
- ; -----------------------------------------------------------------------
- ; AES-GCM initialisation (AES-NI / PCLMULQDQ).
- ; Computes the hash key H = AES_encrypt(0^128), the initial counter block
- ; derived from the IV, and T = AES_encrypt(counter0).
- ; ABI: Microsoft x64.
- ; In:  rcx = AES round-key schedule (16 bytes per round key)
- ;      edx = number of AES rounds (10/12/14; compared against 11 and 13)
- ;      r8  = IV pointer
- ;      r9d = IV length in bytes
- ;      [rsp+80]  -> output H            (written at the end via rax)
- ;      [rsp+88]  -> output counter      (counter0 + 1, via r8)
- ;      [rsp+96]  -> output T = E(ctr0)  (via r9)
- ;      (stack-arg roles inferred from the final stores; presumably
- ;      matches the C prototype AES_GCM_init_aesni(key, nr, iv, ivSz,
- ;      h, counter, initCtr) -- TODO confirm against the C header)
- ; Clobbers: rax, rcx, rdx, r10-r13, xmm0-xmm8, xmm15 (xmm6-8/15 are
- ;      callee-saved on Win64 and are saved/restored on the stack).
- ; -----------------------------------------------------------------------
- push rdi
- push rsi
- push r12
- push r13
- push r14
- ; Move register args out of the volatile arg registers.
- mov rdi, rcx
- mov rsi, rdx
- mov r10, r8
- mov r11d, r9d
- ; Load the three output pointers passed on the stack.
- mov rax, QWORD PTR [rsp+80]
- mov r8, QWORD PTR [rsp+88]
- mov r9, QWORD PTR [rsp+96]
- sub rsp, 80
- ; Win64 requires xmm6-xmm15 to be preserved; save the ones used here.
- movdqu [rsp+16], xmm6
- movdqu [rsp+32], xmm7
- movdqu [rsp+48], xmm8
- movdqu [rsp+64], xmm15
- ; xmm4 = X = 0 (GHASH accumulator / counter builder)
- pxor xmm4, xmm4
- mov edx, r11d
- cmp edx, 12
- jne L_AES_GCM_init_aesni_iv_not_12
- ; # Calculate values when IV is 12 bytes
- ; Set counter based on IV
- ; GCM fast path: counter0 = IV || 0x00000001 (32-bit big-endian one).
- ; 16777216 = 0x01000000, i.e. the value 1 byte-swapped into lane 3.
- mov ecx, 16777216
- pinsrq xmm4, QWORD PTR [r10], 0
- pinsrd xmm4, DWORD PTR [r10+8], 2
- pinsrd xmm4, ecx, 3
- ; H = Encrypt X(=0) and T = Encrypt counter
- ; Two AES encryptions interleaved: xmm5 = E(0) -> H, xmm1 = E(ctr0) -> T.
- movdqa xmm1, xmm4
- movdqa xmm5, OWORD PTR [rdi]
- pxor xmm1, xmm5
- movdqa xmm6, OWORD PTR [rdi+16]
- aesenc xmm5, xmm6
- aesenc xmm1, xmm6
- movdqa xmm6, OWORD PTR [rdi+32]
- aesenc xmm5, xmm6
- aesenc xmm1, xmm6
- movdqa xmm6, OWORD PTR [rdi+48]
- aesenc xmm5, xmm6
- aesenc xmm1, xmm6
- movdqa xmm6, OWORD PTR [rdi+64]
- aesenc xmm5, xmm6
- aesenc xmm1, xmm6
- movdqa xmm6, OWORD PTR [rdi+80]
- aesenc xmm5, xmm6
- aesenc xmm1, xmm6
- movdqa xmm6, OWORD PTR [rdi+96]
- aesenc xmm5, xmm6
- aesenc xmm1, xmm6
- movdqa xmm6, OWORD PTR [rdi+112]
- aesenc xmm5, xmm6
- aesenc xmm1, xmm6
- movdqa xmm6, OWORD PTR [rdi+128]
- aesenc xmm5, xmm6
- aesenc xmm1, xmm6
- movdqa xmm6, OWORD PTR [rdi+144]
- aesenc xmm5, xmm6
- aesenc xmm1, xmm6
- ; Extra rounds for AES-192 (nr >= 11) and AES-256 (nr >= 13).
- cmp esi, 11
- movdqa xmm6, OWORD PTR [rdi+160]
- jl L_AES_GCM_init_aesni_calc_iv_12_last
- aesenc xmm5, xmm6
- aesenc xmm1, xmm6
- movdqa xmm6, OWORD PTR [rdi+176]
- aesenc xmm5, xmm6
- aesenc xmm1, xmm6
- cmp esi, 13
- movdqa xmm6, OWORD PTR [rdi+192]
- jl L_AES_GCM_init_aesni_calc_iv_12_last
- aesenc xmm5, xmm6
- aesenc xmm1, xmm6
- movdqa xmm6, OWORD PTR [rdi+208]
- aesenc xmm5, xmm6
- aesenc xmm1, xmm6
- movdqa xmm6, OWORD PTR [rdi+224]
- L_AES_GCM_init_aesni_calc_iv_12_last:
- aesenclast xmm5, xmm6
- aesenclast xmm1, xmm6
- ; H is kept byte-reversed (bit-reflected form) for GHASH use.
- pshufb xmm5, OWORD PTR L_aes_gcm_bswap_mask
- movdqu xmm15, xmm1
- jmp L_AES_GCM_init_aesni_iv_done
- L_AES_GCM_init_aesni_iv_not_12:
- ; Calculate values when IV is not 12 bytes
- ; H = Encrypt X(=0)
- ; xmm5 starts as round key 0 == 0 XOR rk[0]; then the remaining rounds.
- movdqa xmm5, OWORD PTR [rdi]
- aesenc xmm5, [rdi+16]
- aesenc xmm5, [rdi+32]
- aesenc xmm5, [rdi+48]
- aesenc xmm5, [rdi+64]
- aesenc xmm5, [rdi+80]
- aesenc xmm5, [rdi+96]
- aesenc xmm5, [rdi+112]
- aesenc xmm5, [rdi+128]
- aesenc xmm5, [rdi+144]
- cmp esi, 11
- movdqa xmm8, OWORD PTR [rdi+160]
- jl L_AES_GCM_init_aesni_calc_iv_1_aesenc_avx_last
- aesenc xmm5, xmm8
- aesenc xmm5, [rdi+176]
- cmp esi, 13
- movdqa xmm8, OWORD PTR [rdi+192]
- jl L_AES_GCM_init_aesni_calc_iv_1_aesenc_avx_last
- aesenc xmm5, xmm8
- aesenc xmm5, [rdi+208]
- movdqa xmm8, OWORD PTR [rdi+224]
- L_AES_GCM_init_aesni_calc_iv_1_aesenc_avx_last:
- aesenclast xmm5, xmm8
- pshufb xmm5, OWORD PTR L_aes_gcm_bswap_mask
- ; Calc counter
- ; Initialization vector
- ; counter0 = GHASH(IV padded) per SP 800-38D; rcx = byte offset into IV.
- cmp edx, 0
- mov rcx, 0
- je L_AES_GCM_init_aesni_calc_iv_done
- cmp edx, 16
- jl L_AES_GCM_init_aesni_calc_iv_lt16
- ; Round the length down to a multiple of 16 for the full-block loop.
- and edx, 4294967280
- L_AES_GCM_init_aesni_calc_iv_16_loop:
- ; X = (X xor IV_block) * H in GF(2^128) -- Karatsuba multiply,
- ; then shift left by 1 and reduce mod x^128 + x^7 + x^2 + x + 1.
- movdqu xmm7, [r10+rcx]
- pshufb xmm7, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm4, xmm7
- pshufd xmm1, xmm4, 78
- pshufd xmm2, xmm5, 78
- movdqa xmm3, xmm5
- movdqa xmm0, xmm5
- pclmulqdq xmm3, xmm4, 17
- pclmulqdq xmm0, xmm4, 0
- pxor xmm1, xmm4
- pxor xmm2, xmm5
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- movdqa xmm6, xmm0
- movdqa xmm4, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm6, xmm2
- pxor xmm4, xmm1
- ; Shift the 256-bit product left by one bit (carry across lanes).
- movdqa xmm0, xmm6
- movdqa xmm1, xmm4
- psrld xmm0, 31
- psrld xmm1, 31
- pslld xmm6, 1
- pslld xmm4, 1
- movdqa xmm2, xmm0
- pslldq xmm0, 4
- psrldq xmm2, 12
- pslldq xmm1, 4
- por xmm4, xmm2
- por xmm6, xmm0
- por xmm4, xmm1
- ; Reduction: shifts by 31/30/25 then 1/2/7 implement the GCM polynomial.
- movdqa xmm0, xmm6
- movdqa xmm1, xmm6
- movdqa xmm2, xmm6
- pslld xmm0, 31
- pslld xmm1, 30
- pslld xmm2, 25
- pxor xmm0, xmm1
- pxor xmm0, xmm2
- movdqa xmm1, xmm0
- psrldq xmm1, 4
- pslldq xmm0, 12
- pxor xmm6, xmm0
- movdqa xmm2, xmm6
- movdqa xmm3, xmm6
- movdqa xmm0, xmm6
- psrld xmm2, 1
- psrld xmm3, 2
- psrld xmm0, 7
- pxor xmm2, xmm3
- pxor xmm2, xmm0
- pxor xmm2, xmm1
- pxor xmm2, xmm6
- pxor xmm4, xmm2
- add ecx, 16
- cmp ecx, edx
- jl L_AES_GCM_init_aesni_calc_iv_16_loop
- ; Restore the true IV length and handle any partial final block.
- mov edx, r11d
- cmp ecx, edx
- je L_AES_GCM_init_aesni_calc_iv_done
- L_AES_GCM_init_aesni_calc_iv_lt16:
- ; Copy the remaining (< 16) IV bytes into a zero-padded stack block.
- sub rsp, 16
- pxor xmm7, xmm7
- xor r13d, r13d
- movdqu [rsp], xmm7
- L_AES_GCM_init_aesni_calc_iv_loop:
- movzx r12d, BYTE PTR [r10+rcx]
- mov BYTE PTR [rsp+r13], r12b
- inc ecx
- inc r13d
- cmp ecx, edx
- jl L_AES_GCM_init_aesni_calc_iv_loop
- movdqu xmm7, [rsp]
- add rsp, 16
- pshufb xmm7, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm4, xmm7
- ; GHASH multiply of the padded block (same sequence as the loop above).
- pshufd xmm1, xmm4, 78
- pshufd xmm2, xmm5, 78
- movdqa xmm3, xmm5
- movdqa xmm0, xmm5
- pclmulqdq xmm3, xmm4, 17
- pclmulqdq xmm0, xmm4, 0
- pxor xmm1, xmm4
- pxor xmm2, xmm5
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- movdqa xmm6, xmm0
- movdqa xmm4, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm6, xmm2
- pxor xmm4, xmm1
- movdqa xmm0, xmm6
- movdqa xmm1, xmm4
- psrld xmm0, 31
- psrld xmm1, 31
- pslld xmm6, 1
- pslld xmm4, 1
- movdqa xmm2, xmm0
- pslldq xmm0, 4
- psrldq xmm2, 12
- pslldq xmm1, 4
- por xmm4, xmm2
- por xmm6, xmm0
- por xmm4, xmm1
- movdqa xmm0, xmm6
- movdqa xmm1, xmm6
- movdqa xmm2, xmm6
- pslld xmm0, 31
- pslld xmm1, 30
- pslld xmm2, 25
- pxor xmm0, xmm1
- pxor xmm0, xmm2
- movdqa xmm1, xmm0
- psrldq xmm1, 4
- pslldq xmm0, 12
- pxor xmm6, xmm0
- movdqa xmm2, xmm6
- movdqa xmm3, xmm6
- movdqa xmm0, xmm6
- psrld xmm2, 1
- psrld xmm3, 2
- psrld xmm0, 7
- pxor xmm2, xmm3
- pxor xmm2, xmm0
- pxor xmm2, xmm1
- pxor xmm2, xmm6
- pxor xmm4, xmm2
- L_AES_GCM_init_aesni_calc_iv_done:
- ; T = Encrypt counter
- ; Finish GHASH with the 64-bit IV bit-length, then one more multiply.
- pxor xmm0, xmm0
- shl edx, 3
- pinsrq xmm0, rdx, 0
- pxor xmm4, xmm0
- pshufd xmm1, xmm4, 78
- pshufd xmm2, xmm5, 78
- movdqa xmm3, xmm5
- movdqa xmm0, xmm5
- pclmulqdq xmm3, xmm4, 17
- pclmulqdq xmm0, xmm4, 0
- pxor xmm1, xmm4
- pxor xmm2, xmm5
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- movdqa xmm6, xmm0
- movdqa xmm4, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm6, xmm2
- pxor xmm4, xmm1
- movdqa xmm0, xmm6
- movdqa xmm1, xmm4
- psrld xmm0, 31
- psrld xmm1, 31
- pslld xmm6, 1
- pslld xmm4, 1
- movdqa xmm2, xmm0
- pslldq xmm0, 4
- psrldq xmm2, 12
- pslldq xmm1, 4
- por xmm4, xmm2
- por xmm6, xmm0
- por xmm4, xmm1
- movdqa xmm0, xmm6
- movdqa xmm1, xmm6
- movdqa xmm2, xmm6
- pslld xmm0, 31
- pslld xmm1, 30
- pslld xmm2, 25
- pxor xmm0, xmm1
- pxor xmm0, xmm2
- movdqa xmm1, xmm0
- psrldq xmm1, 4
- pslldq xmm0, 12
- pxor xmm6, xmm0
- movdqa xmm2, xmm6
- movdqa xmm3, xmm6
- movdqa xmm0, xmm6
- psrld xmm2, 1
- psrld xmm3, 2
- psrld xmm0, 7
- pxor xmm2, xmm3
- pxor xmm2, xmm0
- pxor xmm2, xmm1
- pxor xmm2, xmm6
- pxor xmm4, xmm2
- ; Back to byte order: xmm4 now holds counter0.
- pshufb xmm4, OWORD PTR L_aes_gcm_bswap_mask
- ; Encrypt counter
- movdqa xmm7, OWORD PTR [rdi]
- pxor xmm7, xmm4
- aesenc xmm7, [rdi+16]
- aesenc xmm7, [rdi+32]
- aesenc xmm7, [rdi+48]
- aesenc xmm7, [rdi+64]
- aesenc xmm7, [rdi+80]
- aesenc xmm7, [rdi+96]
- aesenc xmm7, [rdi+112]
- aesenc xmm7, [rdi+128]
- aesenc xmm7, [rdi+144]
- cmp esi, 11
- movdqa xmm8, OWORD PTR [rdi+160]
- jl L_AES_GCM_init_aesni_calc_iv_2_aesenc_avx_last
- aesenc xmm7, xmm8
- aesenc xmm7, [rdi+176]
- cmp esi, 13
- movdqa xmm8, OWORD PTR [rdi+192]
- jl L_AES_GCM_init_aesni_calc_iv_2_aesenc_avx_last
- aesenc xmm7, xmm8
- aesenc xmm7, [rdi+208]
- movdqa xmm8, OWORD PTR [rdi+224]
- L_AES_GCM_init_aesni_calc_iv_2_aesenc_avx_last:
- aesenclast xmm7, xmm8
- movdqu xmm15, xmm7
- L_AES_GCM_init_aesni_iv_done:
- ; Store outputs: T, then counter0+1 (epi64-swapped, +1 in low dword), H.
- ; NOTE(review): movdqa stores assume the output buffers are 16-byte
- ; aligned -- confirm callers guarantee this.
- movdqa OWORD PTR [r9], xmm15
- pshufb xmm4, OWORD PTR L_aes_gcm_bswap_epi64
- paddd xmm4, OWORD PTR L_aes_gcm_one
- movdqa OWORD PTR [rax], xmm5
- movdqa OWORD PTR [r8], xmm4
- ; Restore callee-saved xmm registers and stack.
- movdqu xmm6, [rsp+16]
- movdqu xmm7, [rsp+32]
- movdqu xmm8, [rsp+48]
- movdqu xmm15, [rsp+64]
- add rsp, 80
- pop r14
- pop r13
- pop r12
- pop rsi
- pop rdi
- ret
- AES_GCM_init_aesni ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_aad_update_aesni PROC
- ; -----------------------------------------------------------------------
- ; GHASH in the AAD (additional authenticated data) one 16-byte block at
- ; a time: X = (X xor block) * H for each block.
- ; ABI: Microsoft x64.
- ; In:  rcx = AAD pointer
- ;      edx = AAD length in bytes (loop is do-while: assumes edx >= 16
- ;            and a multiple of 16 -- caller's responsibility, TODO confirm)
- ;      r8  -> X, the 16-byte GHASH accumulator (read and written back)
- ;      r9  -> H, the hash key (byte-reflected form)
- ; Clobbers: rax, rcx, xmm0-xmm5, flags (xmm6/xmm7 saved/restored per Win64).
- ; -----------------------------------------------------------------------
- mov rax, rcx
- sub rsp, 32
- ; Preserve callee-saved xmm6/xmm7 (Win64 ABI).
- movdqu [rsp], xmm6
- movdqu [rsp+16], xmm7
- movdqa xmm5, OWORD PTR [r8]
- movdqa xmm6, OWORD PTR [r9]
- xor ecx, ecx
- L_AES_GCM_aad_update_aesni_16_loop:
- ; Fold the next AAD block into X (byte-reversed to GHASH bit order).
- movdqu xmm7, [rax+rcx]
- pshufb xmm7, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm5, xmm7
- ; Karatsuba carry-less multiply: hi = pclmul(17), lo = pclmul(0),
- ; middle term from the XOR-folded halves.
- pshufd xmm1, xmm5, 78
- pshufd xmm2, xmm6, 78
- movdqa xmm3, xmm6
- movdqa xmm0, xmm6
- pclmulqdq xmm3, xmm5, 17
- pclmulqdq xmm0, xmm5, 0
- pxor xmm1, xmm5
- pxor xmm2, xmm6
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- ; Combine into a 256-bit product in xmm5:xmm4 (hi:lo).
- movdqa xmm2, xmm1
- movdqa xmm4, xmm0
- movdqa xmm5, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm4, xmm2
- pxor xmm5, xmm1
- ; Shift the product left by one bit (carries across dword/lane edges).
- movdqa xmm0, xmm4
- movdqa xmm1, xmm5
- psrld xmm0, 31
- psrld xmm1, 31
- pslld xmm4, 1
- pslld xmm5, 1
- movdqa xmm2, xmm0
- pslldq xmm0, 4
- psrldq xmm2, 12
- pslldq xmm1, 4
- por xmm5, xmm2
- por xmm4, xmm0
- por xmm5, xmm1
- ; Reduce modulo the GCM polynomial x^128 + x^7 + x^2 + x + 1
- ; (shift counts 31/30/25 then 1/2/7 encode the polynomial terms).
- movdqa xmm0, xmm4
- movdqa xmm1, xmm4
- movdqa xmm2, xmm4
- pslld xmm0, 31
- pslld xmm1, 30
- pslld xmm2, 25
- pxor xmm0, xmm1
- pxor xmm0, xmm2
- movdqa xmm1, xmm0
- psrldq xmm1, 4
- pslldq xmm0, 12
- pxor xmm4, xmm0
- movdqa xmm2, xmm4
- movdqa xmm3, xmm4
- movdqa xmm0, xmm4
- psrld xmm2, 1
- psrld xmm3, 2
- psrld xmm0, 7
- pxor xmm2, xmm3
- pxor xmm2, xmm0
- pxor xmm2, xmm1
- pxor xmm2, xmm4
- pxor xmm5, xmm2
- ; Next block.
- add ecx, 16
- cmp ecx, edx
- jl L_AES_GCM_aad_update_aesni_16_loop
- ; Write the updated accumulator back and restore xmm6/xmm7.
- movdqa OWORD PTR [r8], xmm5
- movdqu xmm6, [rsp]
- movdqu xmm7, [rsp+16]
- add rsp, 32
- ret
- AES_GCM_aad_update_aesni ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_encrypt_block_aesni PROC
- ; -----------------------------------------------------------------------
- ; Encrypt one 16-byte block in CTR mode: out = in xor AES_encrypt(counter),
- ; and post-increment the counter block in memory.
- ; ABI: Microsoft x64.
- ; In:  rcx = AES round-key schedule
- ;      edx = number of AES rounds (10/12/14; compared against 11 and 13)
- ;      r8  = output ciphertext pointer
- ;      r9  = input plaintext pointer
- ;      [rsp+40] -> counter block (read; incremented and written back)
- ; Clobbers: rax, r10, r11, xmm0, xmm1, flags.
- ; -----------------------------------------------------------------------
- mov r10, r8
- mov r11, r9
- mov rax, QWORD PTR [rsp+40]
- ; xmm0 = counter in AES byte order; xmm1 = counter + 1 stored back.
- movdqu xmm0, [rax]
- movdqa xmm1, xmm0
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_epi64
- paddd xmm1, OWORD PTR L_aes_gcm_one
- pxor xmm0, [rcx]
- movdqu [rax], xmm1
- aesenc xmm0, [rcx+16]
- aesenc xmm0, [rcx+32]
- aesenc xmm0, [rcx+48]
- aesenc xmm0, [rcx+64]
- aesenc xmm0, [rcx+80]
- aesenc xmm0, [rcx+96]
- aesenc xmm0, [rcx+112]
- aesenc xmm0, [rcx+128]
- aesenc xmm0, [rcx+144]
- ; Extra rounds for AES-192 (nr >= 11) and AES-256 (nr >= 13).
- cmp edx, 11
- movdqa xmm1, OWORD PTR [rcx+160]
- jl L_AES_GCM_encrypt_block_aesni_aesenc_block_aesenc_avx_last
- aesenc xmm0, xmm1
- aesenc xmm0, [rcx+176]
- cmp edx, 13
- movdqa xmm1, OWORD PTR [rcx+192]
- jl L_AES_GCM_encrypt_block_aesni_aesenc_block_aesenc_avx_last
- aesenc xmm0, xmm1
- aesenc xmm0, [rcx+208]
- movdqa xmm1, OWORD PTR [rcx+224]
- L_AES_GCM_encrypt_block_aesni_aesenc_block_aesenc_avx_last:
- aesenclast xmm0, xmm1
- ; XOR keystream with the plaintext and store the ciphertext.
- movdqu xmm1, [r11]
- pxor xmm0, xmm1
- movdqu [r10], xmm0
- ; Leaves the byte-reversed ciphertext in xmm0; presumably for a
- ; follow-up GHASH, but xmm0 is volatile in the Win64 ABI so a C
- ; caller cannot rely on it -- NOTE(review): confirm intent.
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- ret
- AES_GCM_encrypt_block_aesni ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_ghash_block_aesni PROC
- ; -----------------------------------------------------------------------
- ; GHASH a single 16-byte block: X = (X xor block) * H in GF(2^128).
- ; ABI: Microsoft x64.
- ; In:  rcx = pointer to the 16-byte data block
- ;      rdx -> X, the GHASH accumulator (read and written back)
- ;      r8  -> H, the hash key (byte-reflected form)
- ; Clobbers: xmm0-xmm5, flags (xmm6/xmm7 saved/restored per Win64).
- ; -----------------------------------------------------------------------
- sub rsp, 32
- ; Preserve callee-saved xmm6/xmm7 (Win64 ABI).
- movdqu [rsp], xmm6
- movdqu [rsp+16], xmm7
- movdqa xmm4, OWORD PTR [rdx]
- movdqa xmm5, OWORD PTR [r8]
- ; Fold the data block into X (byte-reversed to GHASH bit order).
- movdqu xmm7, [rcx]
- pshufb xmm7, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm4, xmm7
- ; Karatsuba carry-less multiply X * H:
- ; hi = pclmul(17), lo = pclmul(0), middle from XOR-folded halves.
- pshufd xmm1, xmm4, 78
- pshufd xmm2, xmm5, 78
- movdqa xmm3, xmm5
- movdqa xmm0, xmm5
- pclmulqdq xmm3, xmm4, 17
- pclmulqdq xmm0, xmm4, 0
- pxor xmm1, xmm4
- pxor xmm2, xmm5
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- ; Combine into a 256-bit product in xmm4:xmm6 (hi:lo).
- movdqa xmm2, xmm1
- movdqa xmm6, xmm0
- movdqa xmm4, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm6, xmm2
- pxor xmm4, xmm1
- ; Shift the product left by one bit (carries across dword/lane edges).
- movdqa xmm0, xmm6
- movdqa xmm1, xmm4
- psrld xmm0, 31
- psrld xmm1, 31
- pslld xmm6, 1
- pslld xmm4, 1
- movdqa xmm2, xmm0
- pslldq xmm0, 4
- psrldq xmm2, 12
- pslldq xmm1, 4
- por xmm4, xmm2
- por xmm6, xmm0
- por xmm4, xmm1
- ; Reduce modulo the GCM polynomial x^128 + x^7 + x^2 + x + 1
- ; (shift counts 31/30/25 then 1/2/7 encode the polynomial terms).
- movdqa xmm0, xmm6
- movdqa xmm1, xmm6
- movdqa xmm2, xmm6
- pslld xmm0, 31
- pslld xmm1, 30
- pslld xmm2, 25
- pxor xmm0, xmm1
- pxor xmm0, xmm2
- movdqa xmm1, xmm0
- psrldq xmm1, 4
- pslldq xmm0, 12
- pxor xmm6, xmm0
- movdqa xmm2, xmm6
- movdqa xmm3, xmm6
- movdqa xmm0, xmm6
- psrld xmm2, 1
- psrld xmm3, 2
- psrld xmm0, 7
- pxor xmm2, xmm3
- pxor xmm2, xmm0
- pxor xmm2, xmm1
- pxor xmm2, xmm6
- pxor xmm4, xmm2
- ; Write the updated accumulator back and restore xmm6/xmm7.
- movdqa OWORD PTR [rdx], xmm4
- movdqu xmm6, [rsp]
- movdqu xmm7, [rsp+16]
- add rsp, 32
- ret
- AES_GCM_ghash_block_aesni ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_encrypt_update_aesni PROC
- push r13
- push r12
- push r14
- push r15
- push rdi
- mov rax, rcx
- mov r10, r8
- mov r8d, edx
- mov r11, r9
- mov r9d, DWORD PTR [rsp+80]
- mov r12, QWORD PTR [rsp+88]
- mov r14, QWORD PTR [rsp+96]
- mov r15, QWORD PTR [rsp+104]
- sub rsp, 320
- movdqu [rsp+160], xmm6
- movdqu [rsp+176], xmm7
- movdqu [rsp+192], xmm8
- movdqu [rsp+208], xmm9
- movdqu [rsp+224], xmm10
- movdqu [rsp+240], xmm11
- movdqu [rsp+256], xmm12
- movdqu [rsp+272], xmm13
- movdqu [rsp+288], xmm14
- movdqu [rsp+304], xmm15
- movdqa xmm6, OWORD PTR [r12]
- movdqa xmm5, OWORD PTR [r14]
- movdqa xmm9, xmm5
- movdqa xmm8, xmm5
- psrlq xmm9, 63
- psllq xmm8, 1
- pslldq xmm9, 8
- por xmm8, xmm9
- pshufd xmm5, xmm5, 255
- psrad xmm5, 31
- pand xmm5, OWORD PTR L_aes_gcm_mod2_128
- pxor xmm5, xmm8
- xor rdi, rdi
- cmp r9d, 128
- mov r13d, r9d
- jl L_AES_GCM_encrypt_update_aesni_done_128
- and r13d, 4294967168
- movdqa xmm2, xmm6
- ; H ^ 1
- movdqu [rsp], xmm5
- ; H ^ 2
- pshufd xmm9, xmm5, 78
- pshufd xmm10, xmm5, 78
- movdqa xmm11, xmm5
- movdqa xmm8, xmm5
- pclmulqdq xmm11, xmm5, 17
- pclmulqdq xmm8, xmm5, 0
- pxor xmm9, xmm5
- pxor xmm10, xmm5
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm0, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm0, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm0, xmm14
- movdqu [rsp+16], xmm0
- ; H ^ 3
- pshufd xmm9, xmm5, 78
- pshufd xmm10, xmm0, 78
- movdqa xmm11, xmm0
- movdqa xmm8, xmm0
- pclmulqdq xmm11, xmm5, 17
- pclmulqdq xmm8, xmm5, 0
- pxor xmm9, xmm5
- pxor xmm10, xmm0
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm1, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm1, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm1, xmm14
- movdqu [rsp+32], xmm1
- ; H ^ 4
- pshufd xmm9, xmm0, 78
- pshufd xmm10, xmm0, 78
- movdqa xmm11, xmm0
- movdqa xmm8, xmm0
- pclmulqdq xmm11, xmm0, 17
- pclmulqdq xmm8, xmm0, 0
- pxor xmm9, xmm0
- pxor xmm10, xmm0
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm3, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm3, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm3, xmm14
- movdqu [rsp+48], xmm3
- ; H ^ 5
- pshufd xmm9, xmm0, 78
- pshufd xmm10, xmm1, 78
- movdqa xmm11, xmm1
- movdqa xmm8, xmm1
- pclmulqdq xmm11, xmm0, 17
- pclmulqdq xmm8, xmm0, 0
- pxor xmm9, xmm0
- pxor xmm10, xmm1
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm7, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm7, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm7, xmm14
- movdqu [rsp+64], xmm7
- ; H ^ 6
- pshufd xmm9, xmm1, 78
- pshufd xmm10, xmm1, 78
- movdqa xmm11, xmm1
- movdqa xmm8, xmm1
- pclmulqdq xmm11, xmm1, 17
- pclmulqdq xmm8, xmm1, 0
- pxor xmm9, xmm1
- pxor xmm10, xmm1
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm7, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm7, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm7, xmm14
- movdqu [rsp+80], xmm7
- ; H ^ 7
- pshufd xmm9, xmm1, 78
- pshufd xmm10, xmm3, 78
- movdqa xmm11, xmm3
- movdqa xmm8, xmm3
- pclmulqdq xmm11, xmm1, 17
- pclmulqdq xmm8, xmm1, 0
- pxor xmm9, xmm1
- pxor xmm10, xmm3
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm7, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm7, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm7, xmm14
- movdqu [rsp+96], xmm7
- ; H ^ 8
- pshufd xmm9, xmm3, 78
- pshufd xmm10, xmm3, 78
- movdqa xmm11, xmm3
- movdqa xmm8, xmm3
- pclmulqdq xmm11, xmm3, 17
- pclmulqdq xmm8, xmm3, 0
- pxor xmm9, xmm3
- pxor xmm10, xmm3
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm7, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm7, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm7, xmm14
- movdqu [rsp+112], xmm7
- ; First 128 bytes of input
- movdqu xmm8, [r15]
- movdqa xmm1, OWORD PTR L_aes_gcm_bswap_epi64
- movdqa xmm0, xmm8
- pshufb xmm8, xmm1
- movdqa xmm9, xmm0
- paddd xmm9, OWORD PTR L_aes_gcm_one
- pshufb xmm9, xmm1
- movdqa xmm10, xmm0
- paddd xmm10, OWORD PTR L_aes_gcm_two
- pshufb xmm10, xmm1
- movdqa xmm11, xmm0
- paddd xmm11, OWORD PTR L_aes_gcm_three
- pshufb xmm11, xmm1
- movdqa xmm12, xmm0
- paddd xmm12, OWORD PTR L_aes_gcm_four
- pshufb xmm12, xmm1
- movdqa xmm13, xmm0
- paddd xmm13, OWORD PTR L_aes_gcm_five
- pshufb xmm13, xmm1
- movdqa xmm14, xmm0
- paddd xmm14, OWORD PTR L_aes_gcm_six
- pshufb xmm14, xmm1
- movdqa xmm15, xmm0
- paddd xmm15, OWORD PTR L_aes_gcm_seven
- pshufb xmm15, xmm1
- paddd xmm0, OWORD PTR L_aes_gcm_eight
- movdqa xmm7, OWORD PTR [rax]
- movdqu [r15], xmm0
- pxor xmm8, xmm7
- pxor xmm9, xmm7
- pxor xmm10, xmm7
- pxor xmm11, xmm7
- pxor xmm12, xmm7
- pxor xmm13, xmm7
- pxor xmm14, xmm7
- pxor xmm15, xmm7
- movdqa xmm7, OWORD PTR [rax+16]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [rax+32]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [rax+48]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [rax+64]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [rax+80]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [rax+96]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [rax+112]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [rax+128]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [rax+144]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- cmp r8d, 11
- movdqa xmm7, OWORD PTR [rax+160]
- jl L_AES_GCM_encrypt_update_aesni_enc_done
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [rax+176]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- cmp r8d, 13
- movdqa xmm7, OWORD PTR [rax+192]
- jl L_AES_GCM_encrypt_update_aesni_enc_done
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [rax+208]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [rax+224]
- L_AES_GCM_encrypt_update_aesni_enc_done:
- aesenclast xmm8, xmm7
- aesenclast xmm9, xmm7
- movdqu xmm0, [r11]
- movdqu xmm1, [r11+16]
- pxor xmm8, xmm0
- pxor xmm9, xmm1
- movdqu [r10], xmm8
- movdqu [r10+16], xmm9
- aesenclast xmm10, xmm7
- aesenclast xmm11, xmm7
- movdqu xmm0, [r11+32]
- movdqu xmm1, [r11+48]
- pxor xmm10, xmm0
- pxor xmm11, xmm1
- movdqu [r10+32], xmm10
- movdqu [r10+48], xmm11
- aesenclast xmm12, xmm7
- aesenclast xmm13, xmm7
- movdqu xmm0, [r11+64]
- movdqu xmm1, [r11+80]
- pxor xmm12, xmm0
- pxor xmm13, xmm1
- movdqu [r10+64], xmm12
- movdqu [r10+80], xmm13
- aesenclast xmm14, xmm7
- aesenclast xmm15, xmm7
- movdqu xmm0, [r11+96]
- movdqu xmm1, [r11+112]
- pxor xmm14, xmm0
- pxor xmm15, xmm1
- movdqu [r10+96], xmm14
- movdqu [r10+112], xmm15
- cmp r13d, 128
- mov edi, 128
- jle L_AES_GCM_encrypt_update_aesni_end_128
- ; More 128 bytes of input
- L_AES_GCM_encrypt_update_aesni_ghash_128:
- lea rcx, QWORD PTR [r11+rdi]
- lea rdx, QWORD PTR [r10+rdi]
- movdqu xmm8, [r15]
- movdqa xmm1, OWORD PTR L_aes_gcm_bswap_epi64
- movdqa xmm0, xmm8
- pshufb xmm8, xmm1
- movdqa xmm9, xmm0
- paddd xmm9, OWORD PTR L_aes_gcm_one
- pshufb xmm9, xmm1
- movdqa xmm10, xmm0
- paddd xmm10, OWORD PTR L_aes_gcm_two
- pshufb xmm10, xmm1
- movdqa xmm11, xmm0
- paddd xmm11, OWORD PTR L_aes_gcm_three
- pshufb xmm11, xmm1
- movdqa xmm12, xmm0
- paddd xmm12, OWORD PTR L_aes_gcm_four
- pshufb xmm12, xmm1
- movdqa xmm13, xmm0
- paddd xmm13, OWORD PTR L_aes_gcm_five
- pshufb xmm13, xmm1
- movdqa xmm14, xmm0
- paddd xmm14, OWORD PTR L_aes_gcm_six
- pshufb xmm14, xmm1
- movdqa xmm15, xmm0
- paddd xmm15, OWORD PTR L_aes_gcm_seven
- pshufb xmm15, xmm1
- paddd xmm0, OWORD PTR L_aes_gcm_eight
- movdqa xmm7, OWORD PTR [rax]
- movdqu [r15], xmm0
- pxor xmm8, xmm7
- pxor xmm9, xmm7
- pxor xmm10, xmm7
- pxor xmm11, xmm7
- pxor xmm12, xmm7
- pxor xmm13, xmm7
- pxor xmm14, xmm7
- pxor xmm15, xmm7
- movdqu xmm7, [rsp+112]
- movdqu xmm0, [rdx+-128]
- aesenc xmm8, [rax+16]
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm0, xmm2
- pshufd xmm1, xmm7, 78
- pshufd xmm5, xmm0, 78
- pxor xmm1, xmm7
- pxor xmm5, xmm0
- movdqa xmm3, xmm0
- pclmulqdq xmm3, xmm7, 17
- aesenc xmm9, [rax+16]
- aesenc xmm10, [rax+16]
- movdqa xmm2, xmm0
- pclmulqdq xmm2, xmm7, 0
- aesenc xmm11, [rax+16]
- aesenc xmm12, [rax+16]
- pclmulqdq xmm1, xmm5, 0
- aesenc xmm13, [rax+16]
- aesenc xmm14, [rax+16]
- aesenc xmm15, [rax+16]
- pxor xmm1, xmm2
- pxor xmm1, xmm3
- movdqu xmm7, [rsp+96]
- movdqu xmm0, [rdx+-112]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [rax+32]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [rax+32]
- aesenc xmm10, [rax+32]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [rax+32]
- aesenc xmm12, [rax+32]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [rax+32]
- aesenc xmm14, [rax+32]
- aesenc xmm15, [rax+32]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+80]
- movdqu xmm0, [rdx+-96]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [rax+48]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [rax+48]
- aesenc xmm10, [rax+48]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [rax+48]
- aesenc xmm12, [rax+48]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [rax+48]
- aesenc xmm14, [rax+48]
- aesenc xmm15, [rax+48]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+64]
- movdqu xmm0, [rdx+-80]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [rax+64]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [rax+64]
- aesenc xmm10, [rax+64]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [rax+64]
- aesenc xmm12, [rax+64]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [rax+64]
- aesenc xmm14, [rax+64]
- aesenc xmm15, [rax+64]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+48]
- movdqu xmm0, [rdx+-64]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [rax+80]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [rax+80]
- aesenc xmm10, [rax+80]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [rax+80]
- aesenc xmm12, [rax+80]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [rax+80]
- aesenc xmm14, [rax+80]
- aesenc xmm15, [rax+80]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+32]
- movdqu xmm0, [rdx+-48]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [rax+96]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [rax+96]
- aesenc xmm10, [rax+96]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [rax+96]
- aesenc xmm12, [rax+96]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [rax+96]
- aesenc xmm14, [rax+96]
- aesenc xmm15, [rax+96]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+16]
- movdqu xmm0, [rdx+-32]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [rax+112]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [rax+112]
- aesenc xmm10, [rax+112]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [rax+112]
- aesenc xmm12, [rax+112]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [rax+112]
- aesenc xmm14, [rax+112]
- aesenc xmm15, [rax+112]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp]
- movdqu xmm0, [rdx+-16]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [rax+128]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [rax+128]
- aesenc xmm10, [rax+128]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [rax+128]
- aesenc xmm12, [rax+128]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [rax+128]
- aesenc xmm14, [rax+128]
- aesenc xmm15, [rax+128]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqa xmm5, xmm1
- psrldq xmm1, 8
- pslldq xmm5, 8
- aesenc xmm8, [rax+144]
- pxor xmm2, xmm5
- pxor xmm3, xmm1
- movdqa xmm7, xmm2
- movdqa xmm4, xmm2
- movdqa xmm5, xmm2
- aesenc xmm9, [rax+144]
- pslld xmm7, 31
- pslld xmm4, 30
- pslld xmm5, 25
- aesenc xmm10, [rax+144]
- pxor xmm7, xmm4
- pxor xmm7, xmm5
- aesenc xmm11, [rax+144]
- movdqa xmm4, xmm7
- pslldq xmm7, 12
- psrldq xmm4, 4
- aesenc xmm12, [rax+144]
- pxor xmm2, xmm7
- movdqa xmm5, xmm2
- movdqa xmm1, xmm2
- movdqa xmm0, xmm2
- aesenc xmm13, [rax+144]
- psrld xmm5, 1
- psrld xmm1, 2
- psrld xmm0, 7
- aesenc xmm14, [rax+144]
- pxor xmm5, xmm1
- pxor xmm5, xmm0
- aesenc xmm15, [rax+144]
- pxor xmm5, xmm4
- pxor xmm2, xmm5
- pxor xmm2, xmm3
- cmp r8d, 11
- movdqa xmm7, OWORD PTR [rax+160]
- jl L_AES_GCM_encrypt_update_aesni_aesenc_128_ghash_avx_done
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [rax+176]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- cmp r8d, 13
- movdqa xmm7, OWORD PTR [rax+192]
- jl L_AES_GCM_encrypt_update_aesni_aesenc_128_ghash_avx_done
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [rax+208]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [rax+224]
- L_AES_GCM_encrypt_update_aesni_aesenc_128_ghash_avx_done:
- aesenclast xmm8, xmm7
- aesenclast xmm9, xmm7
- movdqu xmm0, [rcx]
- movdqu xmm1, [rcx+16]
- pxor xmm8, xmm0
- pxor xmm9, xmm1
- movdqu [rdx], xmm8
- movdqu [rdx+16], xmm9
- aesenclast xmm10, xmm7
- aesenclast xmm11, xmm7
- movdqu xmm0, [rcx+32]
- movdqu xmm1, [rcx+48]
- pxor xmm10, xmm0
- pxor xmm11, xmm1
- movdqu [rdx+32], xmm10
- movdqu [rdx+48], xmm11
- aesenclast xmm12, xmm7
- aesenclast xmm13, xmm7
- movdqu xmm0, [rcx+64]
- movdqu xmm1, [rcx+80]
- pxor xmm12, xmm0
- pxor xmm13, xmm1
- movdqu [rdx+64], xmm12
- movdqu [rdx+80], xmm13
- aesenclast xmm14, xmm7
- aesenclast xmm15, xmm7
- movdqu xmm0, [rcx+96]
- movdqu xmm1, [rcx+112]
- pxor xmm14, xmm0
- pxor xmm15, xmm1
- movdqu [rdx+96], xmm14
- movdqu [rdx+112], xmm15
- add edi, 128
- cmp edi, r13d
- jl L_AES_GCM_encrypt_update_aesni_ghash_128
- L_AES_GCM_encrypt_update_aesni_end_128:
- movdqa xmm4, OWORD PTR L_aes_gcm_bswap_mask
- pshufb xmm8, xmm4
- pshufb xmm9, xmm4
- pshufb xmm10, xmm4
- pshufb xmm11, xmm4
- pxor xmm8, xmm2
- pshufb xmm12, xmm4
- pshufb xmm13, xmm4
- pshufb xmm14, xmm4
- pshufb xmm15, xmm4
- movdqu xmm7, [rsp+112]
- pshufd xmm1, xmm8, 78
- pshufd xmm2, xmm7, 78
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- pclmulqdq xmm3, xmm8, 17
- pclmulqdq xmm0, xmm8, 0
- pxor xmm1, xmm8
- pxor xmm2, xmm7
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- movdqa xmm4, xmm0
- movdqa xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm4, xmm2
- pxor xmm6, xmm1
- movdqu xmm7, [rsp+96]
- pshufd xmm1, xmm9, 78
- pshufd xmm2, xmm7, 78
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- pclmulqdq xmm3, xmm9, 17
- pclmulqdq xmm0, xmm9, 0
- pxor xmm1, xmm9
- pxor xmm2, xmm7
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- pxor xmm4, xmm0
- pxor xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm4, xmm2
- pxor xmm6, xmm1
- movdqu xmm7, [rsp+80]
- pshufd xmm1, xmm10, 78
- pshufd xmm2, xmm7, 78
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- pclmulqdq xmm3, xmm10, 17
- pclmulqdq xmm0, xmm10, 0
- pxor xmm1, xmm10
- pxor xmm2, xmm7
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- pxor xmm4, xmm0
- pxor xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm4, xmm2
- pxor xmm6, xmm1
- movdqu xmm7, [rsp+64]
- pshufd xmm1, xmm11, 78
- pshufd xmm2, xmm7, 78
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- pclmulqdq xmm3, xmm11, 17
- pclmulqdq xmm0, xmm11, 0
- pxor xmm1, xmm11
- pxor xmm2, xmm7
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- pxor xmm4, xmm0
- pxor xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm4, xmm2
- pxor xmm6, xmm1
- movdqu xmm7, [rsp+48]
- pshufd xmm1, xmm12, 78
- pshufd xmm2, xmm7, 78
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- pclmulqdq xmm3, xmm12, 17
- pclmulqdq xmm0, xmm12, 0
- pxor xmm1, xmm12
- pxor xmm2, xmm7
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- pxor xmm4, xmm0
- pxor xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm4, xmm2
- pxor xmm6, xmm1
- movdqu xmm7, [rsp+32]
- pshufd xmm1, xmm13, 78
- pshufd xmm2, xmm7, 78
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- pclmulqdq xmm3, xmm13, 17
- pclmulqdq xmm0, xmm13, 0
- pxor xmm1, xmm13
- pxor xmm2, xmm7
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- pxor xmm4, xmm0
- pxor xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm4, xmm2
- pxor xmm6, xmm1
- movdqu xmm7, [rsp+16]
- pshufd xmm1, xmm14, 78
- pshufd xmm2, xmm7, 78
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- pclmulqdq xmm3, xmm14, 17
- pclmulqdq xmm0, xmm14, 0
- pxor xmm1, xmm14
- pxor xmm2, xmm7
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- pxor xmm4, xmm0
- pxor xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm4, xmm2
- pxor xmm6, xmm1
- movdqu xmm7, [rsp]
- pshufd xmm1, xmm15, 78
- pshufd xmm2, xmm7, 78
- movdqa xmm3, xmm7
- movdqa xmm0, xmm7
- pclmulqdq xmm3, xmm15, 17
- pclmulqdq xmm0, xmm15, 0
- pxor xmm1, xmm15
- pxor xmm2, xmm7
- pclmulqdq xmm1, xmm2, 0
- pxor xmm1, xmm0
- pxor xmm1, xmm3
- movdqa xmm2, xmm1
- pxor xmm4, xmm0
- pxor xmm6, xmm3
- pslldq xmm2, 8
- psrldq xmm1, 8
- pxor xmm4, xmm2
- pxor xmm6, xmm1
- movdqa xmm0, xmm4
- movdqa xmm1, xmm4
- movdqa xmm2, xmm4
- pslld xmm0, 31
- pslld xmm1, 30
- pslld xmm2, 25
- pxor xmm0, xmm1
- pxor xmm0, xmm2
- movdqa xmm1, xmm0
- psrldq xmm1, 4
- pslldq xmm0, 12
- pxor xmm4, xmm0
- movdqa xmm2, xmm4
- movdqa xmm3, xmm4
- movdqa xmm0, xmm4
- psrld xmm2, 1
- psrld xmm3, 2
- psrld xmm0, 7
- pxor xmm2, xmm3
- pxor xmm2, xmm0
- pxor xmm2, xmm1
- pxor xmm2, xmm4
- pxor xmm6, xmm2
- movdqu xmm5, [rsp]
- L_AES_GCM_encrypt_update_aesni_done_128:
- mov edx, r9d
- cmp edi, edx
- jge L_AES_GCM_encrypt_update_aesni_done_enc
- mov r13d, r9d
- and r13d, 4294967280
- cmp edi, r13d
- jge L_AES_GCM_encrypt_update_aesni_last_block_done
- lea rcx, QWORD PTR [r11+rdi]
- lea rdx, QWORD PTR [r10+rdi]
- movdqu xmm8, [r15]
- movdqa xmm9, xmm8
- pshufb xmm8, OWORD PTR L_aes_gcm_bswap_epi64
- paddd xmm9, OWORD PTR L_aes_gcm_one
- pxor xmm8, [rax]
- movdqu [r15], xmm9
- aesenc xmm8, [rax+16]
- aesenc xmm8, [rax+32]
- aesenc xmm8, [rax+48]
- aesenc xmm8, [rax+64]
- aesenc xmm8, [rax+80]
- aesenc xmm8, [rax+96]
- aesenc xmm8, [rax+112]
- aesenc xmm8, [rax+128]
- aesenc xmm8, [rax+144]
- cmp r8d, 11
- movdqa xmm9, OWORD PTR [rax+160]
- jl L_AES_GCM_encrypt_update_aesni_aesenc_block_aesenc_avx_last
- aesenc xmm8, xmm9
- aesenc xmm8, [rax+176]
- cmp r8d, 13
- movdqa xmm9, OWORD PTR [rax+192]
- jl L_AES_GCM_encrypt_update_aesni_aesenc_block_aesenc_avx_last
- aesenc xmm8, xmm9
- aesenc xmm8, [rax+208]
- movdqa xmm9, OWORD PTR [rax+224]
- L_AES_GCM_encrypt_update_aesni_aesenc_block_aesenc_avx_last:
- aesenclast xmm8, xmm9
- movdqu xmm9, [rcx]
- pxor xmm8, xmm9
- movdqu [rdx], xmm8
- pshufb xmm8, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm6, xmm8
- add edi, 16
- cmp edi, r13d
- jge L_AES_GCM_encrypt_update_aesni_last_block_ghash
- L_AES_GCM_encrypt_update_aesni_last_block_start:
- lea rcx, QWORD PTR [r11+rdi]
- lea rdx, QWORD PTR [r10+rdi]
- movdqu xmm8, [r15]
- movdqa xmm9, xmm8
- pshufb xmm8, OWORD PTR L_aes_gcm_bswap_epi64
- paddd xmm9, OWORD PTR L_aes_gcm_one
- pxor xmm8, [rax]
- movdqu [r15], xmm9
- movdqa xmm10, xmm6
- pclmulqdq xmm10, xmm5, 16
- aesenc xmm8, [rax+16]
- aesenc xmm8, [rax+32]
- movdqa xmm11, xmm6
- pclmulqdq xmm11, xmm5, 1
- aesenc xmm8, [rax+48]
- aesenc xmm8, [rax+64]
- movdqa xmm12, xmm6
- pclmulqdq xmm12, xmm5, 0
- aesenc xmm8, [rax+80]
- movdqa xmm1, xmm6
- pclmulqdq xmm1, xmm5, 17
- aesenc xmm8, [rax+96]
- pxor xmm10, xmm11
- movdqa xmm2, xmm10
- psrldq xmm10, 8
- pslldq xmm2, 8
- aesenc xmm8, [rax+112]
- movdqa xmm3, xmm1
- pxor xmm2, xmm12
- pxor xmm3, xmm10
- movdqa xmm0, OWORD PTR L_aes_gcm_mod2_128
- movdqa xmm11, xmm2
- pclmulqdq xmm11, xmm0, 16
- aesenc xmm8, [rax+128]
- pshufd xmm10, xmm2, 78
- pxor xmm10, xmm11
- movdqa xmm11, xmm10
- pclmulqdq xmm11, xmm0, 16
- aesenc xmm8, [rax+144]
- pshufd xmm6, xmm10, 78
- pxor xmm6, xmm11
- pxor xmm6, xmm3
- cmp r8d, 11
- movdqa xmm9, OWORD PTR [rax+160]
- jl L_AES_GCM_encrypt_update_aesni_aesenc_gfmul_last
- aesenc xmm8, xmm9
- aesenc xmm8, [rax+176]
- cmp r8d, 13
- movdqa xmm9, OWORD PTR [rax+192]
- jl L_AES_GCM_encrypt_update_aesni_aesenc_gfmul_last
- aesenc xmm8, xmm9
- aesenc xmm8, [rax+208]
- movdqa xmm9, OWORD PTR [rax+224]
- L_AES_GCM_encrypt_update_aesni_aesenc_gfmul_last:
- aesenclast xmm8, xmm9
- movdqu xmm9, [rcx]
- pxor xmm8, xmm9
- movdqu [rdx], xmm8
- pshufb xmm8, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm6, xmm8
- add edi, 16
- cmp edi, r13d
- jl L_AES_GCM_encrypt_update_aesni_last_block_start
- L_AES_GCM_encrypt_update_aesni_last_block_ghash:
- pshufd xmm9, xmm5, 78
- pshufd xmm10, xmm6, 78
- movdqa xmm11, xmm6
- movdqa xmm8, xmm6
- pclmulqdq xmm11, xmm5, 17
- pclmulqdq xmm8, xmm5, 0
- pxor xmm9, xmm5
- pxor xmm10, xmm6
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm6, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm6, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm6, xmm14
- L_AES_GCM_encrypt_update_aesni_last_block_done:
- L_AES_GCM_encrypt_update_aesni_done_enc:
- movdqa OWORD PTR [r12], xmm6
- movdqu xmm6, [rsp+160]
- movdqu xmm7, [rsp+176]
- movdqu xmm8, [rsp+192]
- movdqu xmm9, [rsp+208]
- movdqu xmm10, [rsp+224]
- movdqu xmm11, [rsp+240]
- movdqu xmm12, [rsp+256]
- movdqu xmm13, [rsp+272]
- movdqu xmm14, [rsp+288]
- movdqu xmm15, [rsp+304]
- add rsp, 320
- pop rdi
- pop r15
- pop r14
- pop r12
- pop r13
- ret
- AES_GCM_encrypt_update_aesni ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- ;-----------------------------------------------------------------------
- ; AES_GCM_encrypt_final_aesni -- produce the final GCM authentication tag.
- ; ABI: Microsoft x64 (MASM); non-leaf stack usage is pushes + sub only.
- ; In:  rcx      = pointer to 16-byte running GHASH state
- ;      rdx      = pointer to the output tag buffer
- ;      r8d      = tag length in bytes (16 takes the fast store path)
- ;      r9d      = ciphertext length in bytes
- ;      [entry rsp+40] = AAD length in bytes           (5th argument)
- ;      [entry rsp+48] = pointer to 16-byte hash value (6th argument)
- ;      [entry rsp+56] = pointer to 16-byte block XORed into the final
- ;                       hash (presumably E(K, counter0) per GCM --
- ;                       TODO confirm against the caller)
- ; Out: r8d tag bytes written to the buffer from rdx
- ; Clobbers: rax, rcx, rdx, r9-r12, r14, flags, xmm0-xmm13
- ;           (xmm6-xmm13 are Win64 callee-saved and are saved/restored;
- ;            r12/r13/r14 are pushed/popped)
- ;-----------------------------------------------------------------------
- AES_GCM_encrypt_final_aesni PROC
- push r13
- push r12
- push r14
- mov rax, rcx
- mov r10d, r9d
- mov r9, rdx
- ; Three pushes (24 bytes) shift the stack args: [entry rsp+40/48/56]
- ; are now at [rsp+64/72/80].
- mov r11d, DWORD PTR [rsp+64]
- mov r12, QWORD PTR [rsp+72]
- mov r14, QWORD PTR [rsp+80]
- sub rsp, 144
- ; Save callee-saved XMM registers (Win64 ABI); [rsp+0] is scratch below.
- movdqu [rsp+16], xmm6
- movdqu [rsp+32], xmm7
- movdqu [rsp+48], xmm8
- movdqu [rsp+64], xmm9
- movdqu [rsp+80], xmm10
- movdqu [rsp+96], xmm11
- movdqu [rsp+112], xmm12
- movdqu [rsp+128], xmm13
- movdqa xmm4, OWORD PTR [rax]
- movdqa xmm5, OWORD PTR [r12]
- movdqa xmm6, OWORD PTR [r14]
- ; Double the hash key in GF(2^128): shift the 128-bit value in xmm5
- ; left one bit (propagating the bit crossing the 64-bit lanes), and if
- ; the original top bit was set, XOR in the reduction constant.
- movdqa xmm8, xmm5
- movdqa xmm7, xmm5
- psrlq xmm8, 63
- psllq xmm7, 1
- pslldq xmm8, 8
- por xmm7, xmm8
- pshufd xmm5, xmm5, 255
- psrad xmm5, 31
- ; xmm5 = all-ones mask iff the top bit of the key was set
- pand xmm5, OWORD PTR L_aes_gcm_mod2_128
- pxor xmm5, xmm7
- ; Build the GHASH length block: low qword = ciphertext length in bits,
- ; high qword = AAD length in bits; fold it into the running hash.
- mov edx, r10d
- mov ecx, r11d
- shl rdx, 3
- shl rcx, 3
- pinsrq xmm0, rdx, 0
- pinsrq xmm0, rcx, 1
- pxor xmm4, xmm0
- ; GHASH multiply: xmm4 = xmm4 * xmm5 in GF(2^128), using the
- ; 3-multiplication Karatsuba scheme on PCLMULQDQ.
- pshufd xmm8, xmm5, 78
- ; 78 = 0x4E swaps the two qwords of the operand
- pshufd xmm9, xmm4, 78
- movdqa xmm10, xmm4
- movdqa xmm7, xmm4
- pclmulqdq xmm10, xmm5, 17
- ; imm 0x11: high qword * high qword
- pclmulqdq xmm7, xmm5, 0
- ; imm 0x00: low qword * low qword
- pxor xmm8, xmm5
- pxor xmm9, xmm4
- pclmulqdq xmm8, xmm9, 0
- ; middle product of (hi^lo) terms
- pxor xmm8, xmm7
- pxor xmm8, xmm10
- ; subtract (XOR) hi and lo products to isolate the cross terms
- movdqa xmm9, xmm8
- movdqa xmm4, xmm10
- pslldq xmm9, 8
- psrldq xmm8, 8
- pxor xmm7, xmm9
- ; xmm7 = low 128 bits of the 256-bit product
- pxor xmm4, xmm8
- ; xmm4 = high 128 bits of the 256-bit product
- ; Reduce the 256-bit product modulo the GCM polynomial: the per-lane
- ; shifts by 31/30/25 and 1/2/7 with the 12/4-byte realignments fold the
- ; low half back into the high half (standard SSE GHASH reduction).
- movdqa xmm11, xmm7
- movdqa xmm12, xmm7
- movdqa xmm13, xmm7
- pslld xmm11, 31
- pslld xmm12, 30
- pslld xmm13, 25
- pxor xmm11, xmm12
- pxor xmm11, xmm13
- movdqa xmm12, xmm11
- psrldq xmm12, 4
- pslldq xmm11, 12
- pxor xmm7, xmm11
- movdqa xmm13, xmm7
- movdqa xmm9, xmm7
- movdqa xmm8, xmm7
- psrld xmm13, 1
- psrld xmm9, 2
- psrld xmm8, 7
- pxor xmm13, xmm9
- pxor xmm13, xmm8
- pxor xmm13, xmm12
- pxor xmm13, xmm7
- pxor xmm4, xmm13
- ; xmm4 = reduced 128-bit GHASH result
- pshufb xmm4, OWORD PTR L_aes_gcm_bswap_mask
- ; back to wire byte order
- movdqu xmm0, xmm6
- pxor xmm0, xmm4
- ; tag = (block loaded from the 7th argument) XOR GHASH
- cmp r8d, 16
- je L_AES_GCM_encrypt_final_aesni_store_tag_16
- ; Short tag: spill the 16-byte tag to scratch and copy r8d bytes.
- xor rcx, rcx
- movdqu [rsp], xmm0
- L_AES_GCM_encrypt_final_aesni_store_tag_loop:
- movzx r13d, BYTE PTR [rsp+rcx]
- mov BYTE PTR [r9+rcx], r13b
- inc ecx
- cmp ecx, r8d
- jne L_AES_GCM_encrypt_final_aesni_store_tag_loop
- jmp L_AES_GCM_encrypt_final_aesni_store_tag_done
- L_AES_GCM_encrypt_final_aesni_store_tag_16:
- ; Full-size tag: single 16-byte unaligned store.
- movdqu [r9], xmm0
- L_AES_GCM_encrypt_final_aesni_store_tag_done:
- ; Restore callee-saved XMM registers and GP registers, then return.
- movdqu xmm6, [rsp+16]
- movdqu xmm7, [rsp+32]
- movdqu xmm8, [rsp+48]
- movdqu xmm9, [rsp+64]
- movdqu xmm10, [rsp+80]
- movdqu xmm11, [rsp+96]
- movdqu xmm12, [rsp+112]
- movdqu xmm13, [rsp+128]
- add rsp, 144
- pop r14
- pop r12
- pop r13
- ret
- AES_GCM_encrypt_final_aesni ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_decrypt_update_aesni PROC
- push r13
- push r12
- push r14
- push r15
- push rdi
- push rsi
- mov rax, rcx
- mov r10, r8
- mov r8d, edx
- mov r11, r9
- mov r9d, DWORD PTR [rsp+88]
- mov r12, QWORD PTR [rsp+96]
- mov r14, QWORD PTR [rsp+104]
- mov r15, QWORD PTR [rsp+112]
- sub rsp, 328
- movdqu [rsp+168], xmm6
- movdqu [rsp+184], xmm7
- movdqu [rsp+200], xmm8
- movdqu [rsp+216], xmm9
- movdqu [rsp+232], xmm10
- movdqu [rsp+248], xmm11
- movdqu [rsp+264], xmm12
- movdqu [rsp+280], xmm13
- movdqu [rsp+296], xmm14
- movdqu [rsp+312], xmm15
- movdqa xmm6, OWORD PTR [r12]
- movdqa xmm5, OWORD PTR [r14]
- movdqa xmm9, xmm5
- movdqa xmm8, xmm5
- psrlq xmm9, 63
- psllq xmm8, 1
- pslldq xmm9, 8
- por xmm8, xmm9
- pshufd xmm5, xmm5, 255
- psrad xmm5, 31
- pand xmm5, OWORD PTR L_aes_gcm_mod2_128
- pxor xmm5, xmm8
- xor edi, edi
- cmp r9d, 128
- mov r13d, r9d
- jl L_AES_GCM_decrypt_update_aesni_done_128
- and r13d, 4294967168
- movdqa xmm2, xmm6
- ; H ^ 1
- movdqu [rsp], xmm5
- ; H ^ 2
- pshufd xmm9, xmm5, 78
- pshufd xmm10, xmm5, 78
- movdqa xmm11, xmm5
- movdqa xmm8, xmm5
- pclmulqdq xmm11, xmm5, 17
- pclmulqdq xmm8, xmm5, 0
- pxor xmm9, xmm5
- pxor xmm10, xmm5
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm0, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm0, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm0, xmm14
- movdqu [rsp+16], xmm0
- ; H ^ 3
- pshufd xmm9, xmm5, 78
- pshufd xmm10, xmm0, 78
- movdqa xmm11, xmm0
- movdqa xmm8, xmm0
- pclmulqdq xmm11, xmm5, 17
- pclmulqdq xmm8, xmm5, 0
- pxor xmm9, xmm5
- pxor xmm10, xmm0
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm1, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm1, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm1, xmm14
- movdqu [rsp+32], xmm1
- ; H ^ 4
- pshufd xmm9, xmm0, 78
- pshufd xmm10, xmm0, 78
- movdqa xmm11, xmm0
- movdqa xmm8, xmm0
- pclmulqdq xmm11, xmm0, 17
- pclmulqdq xmm8, xmm0, 0
- pxor xmm9, xmm0
- pxor xmm10, xmm0
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm3, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm3, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm3, xmm14
- movdqu [rsp+48], xmm3
- ; H ^ 5
- pshufd xmm9, xmm0, 78
- pshufd xmm10, xmm1, 78
- movdqa xmm11, xmm1
- movdqa xmm8, xmm1
- pclmulqdq xmm11, xmm0, 17
- pclmulqdq xmm8, xmm0, 0
- pxor xmm9, xmm0
- pxor xmm10, xmm1
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm7, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm7, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm7, xmm14
- movdqu [rsp+64], xmm7
- ; H ^ 6
- pshufd xmm9, xmm1, 78
- pshufd xmm10, xmm1, 78
- movdqa xmm11, xmm1
- movdqa xmm8, xmm1
- pclmulqdq xmm11, xmm1, 17
- pclmulqdq xmm8, xmm1, 0
- pxor xmm9, xmm1
- pxor xmm10, xmm1
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm7, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm7, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm7, xmm14
- movdqu [rsp+80], xmm7
- ; H ^ 7
- pshufd xmm9, xmm1, 78
- pshufd xmm10, xmm3, 78
- movdqa xmm11, xmm3
- movdqa xmm8, xmm3
- pclmulqdq xmm11, xmm1, 17
- pclmulqdq xmm8, xmm1, 0
- pxor xmm9, xmm1
- pxor xmm10, xmm3
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm7, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm7, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm7, xmm14
- movdqu [rsp+96], xmm7
- ; H ^ 8
- pshufd xmm9, xmm3, 78
- pshufd xmm10, xmm3, 78
- movdqa xmm11, xmm3
- movdqa xmm8, xmm3
- pclmulqdq xmm11, xmm3, 17
- pclmulqdq xmm8, xmm3, 0
- pxor xmm9, xmm3
- pxor xmm10, xmm3
- pclmulqdq xmm9, xmm10, 0
- pxor xmm9, xmm8
- pxor xmm9, xmm11
- movdqa xmm10, xmm9
- movdqa xmm7, xmm11
- pslldq xmm10, 8
- psrldq xmm9, 8
- pxor xmm8, xmm10
- pxor xmm7, xmm9
- movdqa xmm12, xmm8
- movdqa xmm13, xmm8
- movdqa xmm14, xmm8
- pslld xmm12, 31
- pslld xmm13, 30
- pslld xmm14, 25
- pxor xmm12, xmm13
- pxor xmm12, xmm14
- movdqa xmm13, xmm12
- psrldq xmm13, 4
- pslldq xmm12, 12
- pxor xmm8, xmm12
- movdqa xmm14, xmm8
- movdqa xmm10, xmm8
- movdqa xmm9, xmm8
- psrld xmm14, 1
- psrld xmm10, 2
- psrld xmm9, 7
- pxor xmm14, xmm10
- pxor xmm14, xmm9
- pxor xmm14, xmm13
- pxor xmm14, xmm8
- pxor xmm7, xmm14
- movdqu [rsp+112], xmm7
- L_AES_GCM_decrypt_update_aesni_ghash_128:
- lea rcx, QWORD PTR [r11+rdi]
- lea rdx, QWORD PTR [r10+rdi]
- movdqu xmm8, [r15]
- movdqa xmm1, OWORD PTR L_aes_gcm_bswap_epi64
- movdqa xmm0, xmm8
- pshufb xmm8, xmm1
- movdqa xmm9, xmm0
- paddd xmm9, OWORD PTR L_aes_gcm_one
- pshufb xmm9, xmm1
- movdqa xmm10, xmm0
- paddd xmm10, OWORD PTR L_aes_gcm_two
- pshufb xmm10, xmm1
- movdqa xmm11, xmm0
- paddd xmm11, OWORD PTR L_aes_gcm_three
- pshufb xmm11, xmm1
- movdqa xmm12, xmm0
- paddd xmm12, OWORD PTR L_aes_gcm_four
- pshufb xmm12, xmm1
- movdqa xmm13, xmm0
- paddd xmm13, OWORD PTR L_aes_gcm_five
- pshufb xmm13, xmm1
- movdqa xmm14, xmm0
- paddd xmm14, OWORD PTR L_aes_gcm_six
- pshufb xmm14, xmm1
- movdqa xmm15, xmm0
- paddd xmm15, OWORD PTR L_aes_gcm_seven
- pshufb xmm15, xmm1
- paddd xmm0, OWORD PTR L_aes_gcm_eight
- movdqa xmm7, OWORD PTR [rax]
- movdqu [r15], xmm0
- pxor xmm8, xmm7
- pxor xmm9, xmm7
- pxor xmm10, xmm7
- pxor xmm11, xmm7
- pxor xmm12, xmm7
- pxor xmm13, xmm7
- pxor xmm14, xmm7
- pxor xmm15, xmm7
- movdqu xmm7, [rsp+112]
- movdqu xmm0, [rcx]
- aesenc xmm8, [rax+16]
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm0, xmm2
- pshufd xmm1, xmm7, 78
- pshufd xmm5, xmm0, 78
- pxor xmm1, xmm7
- pxor xmm5, xmm0
- movdqa xmm3, xmm0
- pclmulqdq xmm3, xmm7, 17
- aesenc xmm9, [rax+16]
- aesenc xmm10, [rax+16]
- movdqa xmm2, xmm0
- pclmulqdq xmm2, xmm7, 0
- aesenc xmm11, [rax+16]
- aesenc xmm12, [rax+16]
- pclmulqdq xmm1, xmm5, 0
- aesenc xmm13, [rax+16]
- aesenc xmm14, [rax+16]
- aesenc xmm15, [rax+16]
- pxor xmm1, xmm2
- pxor xmm1, xmm3
- movdqu xmm7, [rsp+96]
- movdqu xmm0, [rcx+16]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [rax+32]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [rax+32]
- aesenc xmm10, [rax+32]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [rax+32]
- aesenc xmm12, [rax+32]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [rax+32]
- aesenc xmm14, [rax+32]
- aesenc xmm15, [rax+32]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+80]
- movdqu xmm0, [rcx+32]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [rax+48]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [rax+48]
- aesenc xmm10, [rax+48]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [rax+48]
- aesenc xmm12, [rax+48]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [rax+48]
- aesenc xmm14, [rax+48]
- aesenc xmm15, [rax+48]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+64]
- movdqu xmm0, [rcx+48]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [rax+64]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [rax+64]
- aesenc xmm10, [rax+64]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [rax+64]
- aesenc xmm12, [rax+64]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [rax+64]
- aesenc xmm14, [rax+64]
- aesenc xmm15, [rax+64]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+48]
- movdqu xmm0, [rcx+64]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [rax+80]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [rax+80]
- aesenc xmm10, [rax+80]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [rax+80]
- aesenc xmm12, [rax+80]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [rax+80]
- aesenc xmm14, [rax+80]
- aesenc xmm15, [rax+80]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+32]
- movdqu xmm0, [rcx+80]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [rax+96]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [rax+96]
- aesenc xmm10, [rax+96]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [rax+96]
- aesenc xmm12, [rax+96]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [rax+96]
- aesenc xmm14, [rax+96]
- aesenc xmm15, [rax+96]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp+16]
- movdqu xmm0, [rcx+96]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [rax+112]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [rax+112]
- aesenc xmm10, [rax+112]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [rax+112]
- aesenc xmm12, [rax+112]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [rax+112]
- aesenc xmm14, [rax+112]
- aesenc xmm15, [rax+112]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqu xmm7, [rsp]
- movdqu xmm0, [rcx+112]
- pshufd xmm4, xmm7, 78
- pshufb xmm0, OWORD PTR L_aes_gcm_bswap_mask
- aesenc xmm8, [rax+128]
- pxor xmm4, xmm7
- pshufd xmm5, xmm0, 78
- pxor xmm5, xmm0
- movdqa xmm6, xmm0
- pclmulqdq xmm6, xmm7, 17
- aesenc xmm9, [rax+128]
- aesenc xmm10, [rax+128]
- pclmulqdq xmm7, xmm0, 0
- aesenc xmm11, [rax+128]
- aesenc xmm12, [rax+128]
- pclmulqdq xmm4, xmm5, 0
- aesenc xmm13, [rax+128]
- aesenc xmm14, [rax+128]
- aesenc xmm15, [rax+128]
- pxor xmm1, xmm7
- pxor xmm2, xmm7
- pxor xmm1, xmm6
- pxor xmm3, xmm6
- pxor xmm1, xmm4
- movdqa xmm5, xmm1
- psrldq xmm1, 8
- pslldq xmm5, 8
- aesenc xmm8, [rax+144]
- pxor xmm2, xmm5
- pxor xmm3, xmm1
- movdqa xmm7, xmm2
- movdqa xmm4, xmm2
- movdqa xmm5, xmm2
- aesenc xmm9, [rax+144]
- pslld xmm7, 31
- pslld xmm4, 30
- pslld xmm5, 25
- aesenc xmm10, [rax+144]
- pxor xmm7, xmm4
- pxor xmm7, xmm5
- aesenc xmm11, [rax+144]
- movdqa xmm4, xmm7
- pslldq xmm7, 12
- psrldq xmm4, 4
- aesenc xmm12, [rax+144]
- pxor xmm2, xmm7
- movdqa xmm5, xmm2
- movdqa xmm1, xmm2
- movdqa xmm0, xmm2
- aesenc xmm13, [rax+144]
- psrld xmm5, 1
- psrld xmm1, 2
- psrld xmm0, 7
- aesenc xmm14, [rax+144]
- pxor xmm5, xmm1
- pxor xmm5, xmm0
- aesenc xmm15, [rax+144]
- pxor xmm5, xmm4
- pxor xmm2, xmm5
- pxor xmm2, xmm3
- cmp r8d, 11
- movdqa xmm7, OWORD PTR [rax+160]
- jl L_AES_GCM_decrypt_update_aesni_aesenc_128_ghash_avx_done
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [rax+176]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- cmp r8d, 13
- movdqa xmm7, OWORD PTR [rax+192]
- jl L_AES_GCM_decrypt_update_aesni_aesenc_128_ghash_avx_done
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [rax+208]
- aesenc xmm8, xmm7
- aesenc xmm9, xmm7
- aesenc xmm10, xmm7
- aesenc xmm11, xmm7
- aesenc xmm12, xmm7
- aesenc xmm13, xmm7
- aesenc xmm14, xmm7
- aesenc xmm15, xmm7
- movdqa xmm7, OWORD PTR [rax+224]
- L_AES_GCM_decrypt_update_aesni_aesenc_128_ghash_avx_done:
- aesenclast xmm8, xmm7
- aesenclast xmm9, xmm7
- movdqu xmm0, [rcx]
- movdqu xmm1, [rcx+16]
- pxor xmm8, xmm0
- pxor xmm9, xmm1
- movdqu [rdx], xmm8
- movdqu [rdx+16], xmm9
- aesenclast xmm10, xmm7
- aesenclast xmm11, xmm7
- movdqu xmm0, [rcx+32]
- movdqu xmm1, [rcx+48]
- pxor xmm10, xmm0
- pxor xmm11, xmm1
- movdqu [rdx+32], xmm10
- movdqu [rdx+48], xmm11
- aesenclast xmm12, xmm7
- aesenclast xmm13, xmm7
- movdqu xmm0, [rcx+64]
- movdqu xmm1, [rcx+80]
- pxor xmm12, xmm0
- pxor xmm13, xmm1
- movdqu [rdx+64], xmm12
- movdqu [rdx+80], xmm13
- aesenclast xmm14, xmm7
- aesenclast xmm15, xmm7
- movdqu xmm0, [rcx+96]
- movdqu xmm1, [rcx+112]
- pxor xmm14, xmm0
- pxor xmm15, xmm1
- movdqu [rdx+96], xmm14
- movdqu [rdx+112], xmm15
- add edi, 128
- cmp edi, r13d
- jl L_AES_GCM_decrypt_update_aesni_ghash_128
- movdqa xmm6, xmm2
- movdqu xmm5, [rsp]
- L_AES_GCM_decrypt_update_aesni_done_128:
- mov edx, r9d
- cmp edi, edx
- jge L_AES_GCM_decrypt_update_aesni_done_dec
- mov r13d, r9d
- and r13d, 4294967280
- cmp edi, r13d
- jge L_AES_GCM_decrypt_update_aesni_last_block_done
- L_AES_GCM_decrypt_update_aesni_last_block_start:
- lea rcx, QWORD PTR [r11+rdi]
- lea rdx, QWORD PTR [r10+rdi]
- movdqu xmm1, [rcx]
- movdqa xmm0, xmm5
- pshufb xmm1, OWORD PTR L_aes_gcm_bswap_mask
- pxor xmm1, xmm6
- movdqu xmm8, [r15]
- movdqa xmm9, xmm8
- pshufb xmm8, OWORD PTR L_aes_gcm_bswap_epi64
- paddd xmm9, OWORD PTR L_aes_gcm_one
- pxor xmm8, [rax]
- movdqu [r15], xmm9
- movdqa xmm10, xmm1
- pclmulqdq xmm10, xmm0, 16
- aesenc xmm8, [rax+16]
- aesenc xmm8, [rax+32]
- movdqa xmm11, xmm1
- pclmulqdq xmm11, xmm0, 1
- aesenc xmm8, [rax+48]
- aesenc xmm8, [rax+64]
- movdqa xmm12, xmm1
- pclmulqdq xmm12, xmm0, 0
- aesenc xmm8, [rax+80]
- movdqa xmm1, xmm1
- pclmulqdq xmm1, xmm0, 17
- aesenc xmm8, [rax+96]
- pxor xmm10, xmm11
- movdqa xmm2, xmm10
- psrldq xmm10, 8
- pslldq xmm2, 8
- aesenc xmm8, [rax+112]
- movdqa xmm3, xmm1
- pxor xmm2, xmm12
- pxor xmm3, xmm10
- movdqa xmm0, OWORD PTR L_aes_gcm_mod2_128
- movdqa xmm11, xmm2
- pclmulqdq xmm11, xmm0, 16
- aesenc xmm8, [rax+128]
- pshufd xmm10, xmm2, 78
- pxor xmm10, xmm11
- movdqa xmm11, xmm10
- pclmulqdq xmm11, xmm0, 16
- aesenc xmm8, [rax+144]
- pshufd xmm6, xmm10, 78
- pxor xmm6, xmm11
- pxor xmm6, xmm3
- cmp r8d, 11
- movdqa xmm9, OWORD PTR [rax+160]
- jl L_AES_GCM_decrypt_update_aesni_aesenc_gfmul_last
- aesenc xmm8, xmm9
- aesenc xmm8, [rax+176]
- cmp r8d, 13
- movdqa xmm9, OWORD PTR [rax+192]
- jl L_AES_GCM_decrypt_update_aesni_aesenc_gfmul_last
- aesenc xmm8, xmm9
- aesenc xmm8, [rax+208]
- movdqa xmm9, OWORD PTR [rax+224]
- L_AES_GCM_decrypt_update_aesni_aesenc_gfmul_last:
- aesenclast xmm8, xmm9
- movdqu xmm9, [rcx]
- pxor xmm8, xmm9
- movdqu [rdx], xmm8
- add edi, 16
- cmp edi, r13d
- jl L_AES_GCM_decrypt_update_aesni_last_block_start
- L_AES_GCM_decrypt_update_aesni_last_block_done:
- L_AES_GCM_decrypt_update_aesni_done_dec:
- movdqa OWORD PTR [r12], xmm6
- movdqu xmm6, [rsp+168]
- movdqu xmm7, [rsp+184]
- movdqu xmm8, [rsp+200]
- movdqu xmm9, [rsp+216]
- movdqu xmm10, [rsp+232]
- movdqu xmm11, [rsp+248]
- movdqu xmm12, [rsp+264]
- movdqu xmm13, [rsp+280]
- movdqu xmm14, [rsp+296]
- movdqu xmm15, [rsp+312]
- add rsp, 328
- pop rsi
- pop rdi
- pop r15
- pop r14
- pop r12
- pop r13
- ret
- AES_GCM_decrypt_update_aesni ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- ; AES_GCM_decrypt_final_aesni
- ; Finishes GCM decryption: folds the bit lengths of the AAD and ciphertext
- ; into the running GHASH state, multiplies by the hash key H, XORs with the
- ; pre-encrypted first counter block, and compares the result against the
- ; caller-supplied authentication tag.  Writes 1 to the result DWORD on a
- ; matching tag, 0 otherwise.
- ; Microsoft x64 ABI.  Register/argument roles as used below
- ; (NOTE(review): names inferred from usage - confirm against the C prototype):
- ;   rcx  -> rax : pointer to 16-byte GHASH state X
- ;   rdx  -> r9  : pointer to expected authentication tag
- ;   r8d         : tag size in bytes (16 selects the SIMD compare path)
- ;   r9d  -> r10d: ciphertext length in bytes
- ;   [rsp+80] -> r11d: AAD length in bytes        (5th arg)
- ;   [rsp+88] -> r12 : pointer to hash key H      (6th arg)
- ;   [rsp+96] -> r14 : pointer to EK(Y0) block    (7th arg)
- ;   [rsp+104]-> rbp : pointer to result DWORD    (8th arg)
- ; Clobbers: rax, rcx, rdx, r10, r11; saves/restores r12-r15, rbp and the
- ; Win64 callee-saved xmm6-xmm13, xmm15 (xmm14 is not used here).
- AES_GCM_decrypt_final_aesni PROC
- push r13
- push r12
- push r14
- push rbp
- push r15
- mov rax, rcx                        ; rax = GHASH state pointer
- mov r10d, r9d                       ; r10d = ciphertext length (bytes)
- mov r9, rdx                         ; r9 = expected tag pointer
- mov r11d, DWORD PTR [rsp+80]        ; stack args: 5 pushes + ret + 32B shadow = rsp+80
- mov r12, QWORD PTR [rsp+88]
- mov r14, QWORD PTR [rsp+96]
- mov rbp, QWORD PTR [rsp+104]
- sub rsp, 160
- ; Save Win64 callee-saved XMM registers used by this function
- movdqu [rsp+16], xmm6
- movdqu [rsp+32], xmm7
- movdqu [rsp+48], xmm8
- movdqu [rsp+64], xmm9
- movdqu [rsp+80], xmm10
- movdqu [rsp+96], xmm11
- movdqu [rsp+112], xmm12
- movdqu [rsp+128], xmm13
- movdqu [rsp+144], xmm15
- movdqa xmm6, OWORD PTR [rax]        ; xmm6 = GHASH state X
- movdqa xmm5, OWORD PTR [r12]        ; xmm5 = hash key H
- movdqa xmm15, OWORD PTR [r14]       ; xmm15 = EK(Y0) (encrypted initial counter)
- ; Convert H into the doubled form used by the gfmul below:
- ; xmm5 = (H << 1) xor (mod2_128 mask if H's top bit was set)
- movdqa xmm8, xmm5
- movdqa xmm7, xmm5
- psrlq xmm8, 63                      ; carry bits between the two 64-bit halves
- psllq xmm7, 1
- pslldq xmm8, 8                      ; move low-half carry up into the high half
- por xmm7, xmm8                      ; xmm7 = H shifted left by one bit (128-bit)
- pshufd xmm5, xmm5, 255              ; broadcast top dword
- psrad xmm5, 31                      ; all-ones if H's msb was set, else zero
- pand xmm5, OWORD PTR L_aes_gcm_mod2_128 ; conditional reduction constant
- pxor xmm5, xmm7
- ; Build the GCM length block: low qword = ciphertext bits, high = AAD bits
- mov edx, r10d
- mov ecx, r11d
- shl rdx, 3                          ; bytes -> bits
- shl rcx, 3
- pinsrq xmm0, rdx, 0
- pinsrq xmm0, rcx, 1                 ; both qwords written, so xmm0 is fully defined
- pxor xmm6, xmm0                     ; X ^= length block
- ; GHASH multiply: xmm6 = (X * H) in GF(2^128), Karatsuba with PCLMULQDQ
- pshufd xmm8, xmm5, 78
- pshufd xmm9, xmm6, 78
- movdqa xmm10, xmm6
- movdqa xmm7, xmm6
- pclmulqdq xmm10, xmm5, 17           ; high*high
- pclmulqdq xmm7, xmm5, 0             ; low*low
- pxor xmm8, xmm5
- pxor xmm9, xmm6
- pclmulqdq xmm8, xmm9, 0             ; (hi^lo)*(hi^lo) middle term
- pxor xmm8, xmm7
- pxor xmm8, xmm10                    ; middle -= low, high (Karatsuba fold)
- ; Combine into 256-bit product: xmm7 = low 128, xmm6 = high 128
- movdqa xmm9, xmm8
- movdqa xmm6, xmm10
- pslldq xmm9, 8
- psrldq xmm8, 8
- pxor xmm7, xmm9
- pxor xmm6, xmm8
- ; Reduce modulo the GCM polynomial (shift-by-31/30/25 then 1/2/7 pattern)
- movdqa xmm11, xmm7
- movdqa xmm12, xmm7
- movdqa xmm13, xmm7
- pslld xmm11, 31
- pslld xmm12, 30
- pslld xmm13, 25
- pxor xmm11, xmm12
- pxor xmm11, xmm13
- movdqa xmm12, xmm11
- psrldq xmm12, 4
- pslldq xmm11, 12
- pxor xmm7, xmm11
- movdqa xmm13, xmm7
- movdqa xmm9, xmm7
- movdqa xmm8, xmm7
- psrld xmm13, 1
- psrld xmm9, 2
- psrld xmm8, 7
- pxor xmm13, xmm9
- pxor xmm13, xmm8
- pxor xmm13, xmm12
- pxor xmm13, xmm7
- pxor xmm6, xmm13                    ; xmm6 = reduced GHASH result
- pshufb xmm6, OWORD PTR L_aes_gcm_bswap_mask ; back to byte order in memory
- movdqu xmm0, xmm15
- pxor xmm0, xmm6                     ; xmm0 = computed tag = GHASH ^ EK(Y0)
- cmp r8d, 16
- je L_AES_GCM_decrypt_final_aesni_cmp_tag_16
- ; Partial tag (< 16 bytes): byte-wise compare, OR-accumulating the
- ; differences so the loop's timing does not depend on where a mismatch is
- sub rsp, 16
- xor rcx, rcx
- xor r15, r15                        ; r15 accumulates XOR of tag bytes
- movdqu [rsp], xmm0
- L_AES_GCM_decrypt_final_aesni_cmp_tag_loop:
- movzx r13d, BYTE PTR [rsp+rcx]
- xor r13b, BYTE PTR [r9+rcx]
- or r15b, r13b                       ; non-zero iff any byte differed so far
- inc ecx
- cmp ecx, r8d
- jne L_AES_GCM_decrypt_final_aesni_cmp_tag_loop
- cmp r15, 0
- sete r15b                           ; r15 = 1 when tags match
- add rsp, 16
- xor rcx, rcx
- jmp L_AES_GCM_decrypt_final_aesni_cmp_tag_done
- L_AES_GCM_decrypt_final_aesni_cmp_tag_16:
- ; Full 16-byte tag: single SIMD compare
- movdqu xmm1, [r9]
- pcmpeqb xmm0, xmm1
- pmovmskb rdx, xmm0                  ; one mask bit per byte
- ; %%edx == 0xFFFF then return 1 else => return 0
- xor r15d, r15d
- cmp edx, 65535
- sete r15b
- L_AES_GCM_decrypt_final_aesni_cmp_tag_done:
- mov DWORD PTR [rbp], r15d           ; *result = match flag
- ; Restore callee-saved XMM registers and return
- movdqu xmm6, [rsp+16]
- movdqu xmm7, [rsp+32]
- movdqu xmm8, [rsp+48]
- movdqu xmm9, [rsp+64]
- movdqu xmm10, [rsp+80]
- movdqu xmm11, [rsp+96]
- movdqu xmm12, [rsp+112]
- movdqu xmm13, [rsp+128]
- movdqu xmm15, [rsp+144]
- add rsp, 160
- pop r15
- pop rbp
- pop r14
- pop r12
- pop r13
- ret
- AES_GCM_decrypt_final_aesni ENDP
- _text ENDS
- IFDEF HAVE_INTEL_AVX1
- ; 16-byte constants for the AVX1 AES-GCM implementation.  The one..eight
- ; values carry their increment in the high qword because the counter is
- ; held with its 32-bit counter word in the upper lane after the
- ; bswap_epi64 shuffle; each also has a pointer slot for indirect loads.
- _DATA SEGMENT
- ALIGN 16
- L_avx1_aes_gcm_one QWORD 0, 1
- ptr_L_avx1_aes_gcm_one QWORD L_avx1_aes_gcm_one
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_avx1_aes_gcm_two QWORD 0, 2
- ptr_L_avx1_aes_gcm_two QWORD L_avx1_aes_gcm_two
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_avx1_aes_gcm_three QWORD 0, 3
- ptr_L_avx1_aes_gcm_three QWORD L_avx1_aes_gcm_three
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_avx1_aes_gcm_four QWORD 0, 4
- ptr_L_avx1_aes_gcm_four QWORD L_avx1_aes_gcm_four
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_avx1_aes_gcm_five QWORD 0, 5
- ptr_L_avx1_aes_gcm_five QWORD L_avx1_aes_gcm_five
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_avx1_aes_gcm_six QWORD 0, 6
- ptr_L_avx1_aes_gcm_six QWORD L_avx1_aes_gcm_six
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_avx1_aes_gcm_seven QWORD 0, 7
- ptr_L_avx1_aes_gcm_seven QWORD L_avx1_aes_gcm_seven
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_avx1_aes_gcm_eight QWORD 0, 8
- ptr_L_avx1_aes_gcm_eight QWORD L_avx1_aes_gcm_eight
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- ; pshufb mask 0x08090A0B0C0D0E0F_0001020304050607: byte-swaps each
- ; 64-bit half independently (used for the big-endian counter lane)
- L_avx1_aes_gcm_bswap_epi64 QWORD 283686952306183, 579005069656919567
- ptr_L_avx1_aes_gcm_bswap_epi64 QWORD L_avx1_aes_gcm_bswap_epi64
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- ; pshufb mask 0x000102030405060708090A0B0C0D0E0F: reverses all 16 bytes,
- ; converting a full 128-bit value between memory and GHASH bit order
- L_avx1_aes_gcm_bswap_mask QWORD 579005069656919567, 283686952306183
- ptr_L_avx1_aes_gcm_bswap_mask QWORD L_avx1_aes_gcm_bswap_mask
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- ; 0xC2000000000000000000000000000001: GHASH reduction constant for the
- ; GCM polynomial (high qword 0xC200000000000000, low qword 1)
- L_avx1_aes_gcm_mod2_128 QWORD 1, 13979173243358019584
- ptr_L_avx1_aes_gcm_mod2_128 QWORD L_avx1_aes_gcm_mod2_128
- _DATA ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_encrypt_avx1 PROC
- push r13
- push rdi
- push rsi
- push r12
- push rbx
- push r14
- push r15
- mov rdi, rcx
- mov rsi, rdx
- mov r12, r8
- mov rax, r9
- mov r8, QWORD PTR [rsp+96]
- mov r9d, DWORD PTR [rsp+104]
- mov r11d, DWORD PTR [rsp+112]
- mov ebx, DWORD PTR [rsp+120]
- mov r14d, DWORD PTR [rsp+128]
- mov r15, QWORD PTR [rsp+136]
- mov r10d, DWORD PTR [rsp+144]
- sub rsp, 320
- vmovdqu OWORD PTR [rsp+160], xmm6
- vmovdqu OWORD PTR [rsp+176], xmm7
- vmovdqu OWORD PTR [rsp+192], xmm8
- vmovdqu OWORD PTR [rsp+208], xmm9
- vmovdqu OWORD PTR [rsp+224], xmm10
- vmovdqu OWORD PTR [rsp+240], xmm11
- vmovdqu OWORD PTR [rsp+256], xmm12
- vmovdqu OWORD PTR [rsp+272], xmm13
- vmovdqu OWORD PTR [rsp+288], xmm14
- vmovdqu OWORD PTR [rsp+304], xmm15
- vpxor xmm4, xmm4, xmm4
- vpxor xmm6, xmm6, xmm6
- mov edx, ebx
- cmp edx, 12
- jne L_AES_GCM_encrypt_avx1_iv_not_12
- ; # Calculate values when IV is 12 bytes
- ; Set counter based on IV
- mov ecx, 16777216
- vmovq xmm4, QWORD PTR [rax]
- vpinsrd xmm4, xmm4, DWORD PTR [rax+8], 2
- vpinsrd xmm4, xmm4, ecx, 3
- ; H = Encrypt X(=0) and T = Encrypt counter
- vmovdqa xmm5, OWORD PTR [r15]
- vpxor xmm1, xmm4, xmm5
- vmovdqa xmm7, OWORD PTR [r15+16]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+32]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+48]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+64]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+80]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+96]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+112]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+128]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+144]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- cmp r10d, 11
- vmovdqa xmm7, OWORD PTR [r15+160]
- jl L_AES_GCM_encrypt_avx1_calc_iv_12_last
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+176]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- cmp r10d, 13
- vmovdqa xmm7, OWORD PTR [r15+192]
- jl L_AES_GCM_encrypt_avx1_calc_iv_12_last
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+208]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+224]
- L_AES_GCM_encrypt_avx1_calc_iv_12_last:
- vaesenclast xmm5, xmm5, xmm7
- vaesenclast xmm1, xmm1, xmm7
- vpshufb xmm5, xmm5, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vmovdqu OWORD PTR [rsp+144], xmm1
- jmp L_AES_GCM_encrypt_avx1_iv_done
- L_AES_GCM_encrypt_avx1_iv_not_12:
- ; Calculate values when IV is not 12 bytes
- ; H = Encrypt X(=0)
- vmovdqa xmm5, OWORD PTR [r15]
- vaesenc xmm5, xmm5, [r15+16]
- vaesenc xmm5, xmm5, [r15+32]
- vaesenc xmm5, xmm5, [r15+48]
- vaesenc xmm5, xmm5, [r15+64]
- vaesenc xmm5, xmm5, [r15+80]
- vaesenc xmm5, xmm5, [r15+96]
- vaesenc xmm5, xmm5, [r15+112]
- vaesenc xmm5, xmm5, [r15+128]
- vaesenc xmm5, xmm5, [r15+144]
- cmp r10d, 11
- vmovdqa xmm9, OWORD PTR [r15+160]
- jl L_AES_GCM_encrypt_avx1_calc_iv_1_aesenc_avx_last
- vaesenc xmm5, xmm5, xmm9
- vaesenc xmm5, xmm5, [r15+176]
- cmp r10d, 13
- vmovdqa xmm9, OWORD PTR [r15+192]
- jl L_AES_GCM_encrypt_avx1_calc_iv_1_aesenc_avx_last
- vaesenc xmm5, xmm5, xmm9
- vaesenc xmm5, xmm5, [r15+208]
- vmovdqa xmm9, OWORD PTR [r15+224]
- L_AES_GCM_encrypt_avx1_calc_iv_1_aesenc_avx_last:
- vaesenclast xmm5, xmm5, xmm9
- vpshufb xmm5, xmm5, OWORD PTR L_avx1_aes_gcm_bswap_mask
- ; Calc counter
- ; Initialization vector
- cmp edx, 0
- mov rcx, 0
- je L_AES_GCM_encrypt_avx1_calc_iv_done
- cmp edx, 16
- jl L_AES_GCM_encrypt_avx1_calc_iv_lt16
- and edx, 4294967280
- L_AES_GCM_encrypt_avx1_calc_iv_16_loop:
- vmovdqu xmm8, OWORD PTR [rax+rcx]
- vpshufb xmm8, xmm8, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm4, xmm4, xmm8
- ; ghash_gfmul_avx
- vpshufd xmm1, xmm4, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpxor xmm1, xmm1, xmm4
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vmovdqa xmm7, xmm0
- vmovdqa xmm4, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm7, xmm7, xmm2
- vpxor xmm4, xmm4, xmm1
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm4, xmm4, xmm1
- vpslld xmm0, xmm7, 31
- vpslld xmm1, xmm7, 30
- vpslld xmm2, xmm7, 25
- vpxor xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm2
- vmovdqa xmm1, xmm0
- vpsrldq xmm1, xmm1, 4
- vpslldq xmm0, xmm0, 12
- vpxor xmm7, xmm7, xmm0
- vpsrld xmm2, xmm7, 1
- vpsrld xmm3, xmm7, 2
- vpsrld xmm0, xmm7, 7
- vpxor xmm2, xmm2, xmm3
- vpxor xmm2, xmm2, xmm0
- vpxor xmm2, xmm2, xmm1
- vpxor xmm2, xmm2, xmm7
- vpxor xmm4, xmm4, xmm2
- add ecx, 16
- cmp ecx, edx
- jl L_AES_GCM_encrypt_avx1_calc_iv_16_loop
- mov edx, ebx
- cmp ecx, edx
- je L_AES_GCM_encrypt_avx1_calc_iv_done
- L_AES_GCM_encrypt_avx1_calc_iv_lt16:
- sub rsp, 16
- vpxor xmm8, xmm8, xmm8
- xor ebx, ebx
- vmovdqu OWORD PTR [rsp], xmm8
- L_AES_GCM_encrypt_avx1_calc_iv_loop:
- movzx r13d, BYTE PTR [rax+rcx]
- mov BYTE PTR [rsp+rbx], r13b
- inc ecx
- inc ebx
- cmp ecx, edx
- jl L_AES_GCM_encrypt_avx1_calc_iv_loop
- vmovdqu xmm8, OWORD PTR [rsp]
- add rsp, 16
- vpshufb xmm8, xmm8, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm4, xmm4, xmm8
- ; ghash_gfmul_avx
- vpshufd xmm1, xmm4, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpxor xmm1, xmm1, xmm4
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vmovdqa xmm7, xmm0
- vmovdqa xmm4, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm7, xmm7, xmm2
- vpxor xmm4, xmm4, xmm1
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm4, xmm4, xmm1
- vpslld xmm0, xmm7, 31
- vpslld xmm1, xmm7, 30
- vpslld xmm2, xmm7, 25
- vpxor xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm2
- vmovdqa xmm1, xmm0
- vpsrldq xmm1, xmm1, 4
- vpslldq xmm0, xmm0, 12
- vpxor xmm7, xmm7, xmm0
- vpsrld xmm2, xmm7, 1
- vpsrld xmm3, xmm7, 2
- vpsrld xmm0, xmm7, 7
- vpxor xmm2, xmm2, xmm3
- vpxor xmm2, xmm2, xmm0
- vpxor xmm2, xmm2, xmm1
- vpxor xmm2, xmm2, xmm7
- vpxor xmm4, xmm4, xmm2
- L_AES_GCM_encrypt_avx1_calc_iv_done:
- ; T = Encrypt counter
- vpxor xmm0, xmm0, xmm0
- shl edx, 3
- vmovq xmm0, rdx
- vpxor xmm4, xmm4, xmm0
- ; ghash_gfmul_avx
- vpshufd xmm1, xmm4, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpxor xmm1, xmm1, xmm4
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vmovdqa xmm7, xmm0
- vmovdqa xmm4, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm7, xmm7, xmm2
- vpxor xmm4, xmm4, xmm1
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm4, xmm4, xmm1
- vpslld xmm0, xmm7, 31
- vpslld xmm1, xmm7, 30
- vpslld xmm2, xmm7, 25
- vpxor xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm2
- vmovdqa xmm1, xmm0
- vpsrldq xmm1, xmm1, 4
- vpslldq xmm0, xmm0, 12
- vpxor xmm7, xmm7, xmm0
- vpsrld xmm2, xmm7, 1
- vpsrld xmm3, xmm7, 2
- vpsrld xmm0, xmm7, 7
- vpxor xmm2, xmm2, xmm3
- vpxor xmm2, xmm2, xmm0
- vpxor xmm2, xmm2, xmm1
- vpxor xmm2, xmm2, xmm7
- vpxor xmm4, xmm4, xmm2
- vpshufb xmm4, xmm4, OWORD PTR L_avx1_aes_gcm_bswap_mask
- ; Encrypt counter
- vmovdqa xmm8, OWORD PTR [r15]
- vpxor xmm8, xmm8, xmm4
- vaesenc xmm8, xmm8, [r15+16]
- vaesenc xmm8, xmm8, [r15+32]
- vaesenc xmm8, xmm8, [r15+48]
- vaesenc xmm8, xmm8, [r15+64]
- vaesenc xmm8, xmm8, [r15+80]
- vaesenc xmm8, xmm8, [r15+96]
- vaesenc xmm8, xmm8, [r15+112]
- vaesenc xmm8, xmm8, [r15+128]
- vaesenc xmm8, xmm8, [r15+144]
- cmp r10d, 11
- vmovdqa xmm9, OWORD PTR [r15+160]
- jl L_AES_GCM_encrypt_avx1_calc_iv_2_aesenc_avx_last
- vaesenc xmm8, xmm8, xmm9
- vaesenc xmm8, xmm8, [r15+176]
- cmp r10d, 13
- vmovdqa xmm9, OWORD PTR [r15+192]
- jl L_AES_GCM_encrypt_avx1_calc_iv_2_aesenc_avx_last
- vaesenc xmm8, xmm8, xmm9
- vaesenc xmm8, xmm8, [r15+208]
- vmovdqa xmm9, OWORD PTR [r15+224]
- L_AES_GCM_encrypt_avx1_calc_iv_2_aesenc_avx_last:
- vaesenclast xmm8, xmm8, xmm9
- vmovdqu OWORD PTR [rsp+144], xmm8
- L_AES_GCM_encrypt_avx1_iv_done:
- ; Additional authentication data
- mov edx, r11d
- cmp edx, 0
- je L_AES_GCM_encrypt_avx1_calc_aad_done
- xor ecx, ecx
- cmp edx, 16
- jl L_AES_GCM_encrypt_avx1_calc_aad_lt16
- and edx, 4294967280
- L_AES_GCM_encrypt_avx1_calc_aad_16_loop:
- vmovdqu xmm8, OWORD PTR [r12+rcx]
- vpshufb xmm8, xmm8, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm6, xmm6, xmm8
- ; ghash_gfmul_avx
- vpshufd xmm1, xmm6, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm6, 17
- vpclmulqdq xmm0, xmm5, xmm6, 0
- vpxor xmm1, xmm1, xmm6
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vmovdqa xmm7, xmm0
- vmovdqa xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm7, xmm7, xmm2
- vpxor xmm6, xmm6, xmm1
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm6, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm6, xmm6, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm6, xmm6, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm6, xmm6, xmm1
- vpslld xmm0, xmm7, 31
- vpslld xmm1, xmm7, 30
- vpslld xmm2, xmm7, 25
- vpxor xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm2
- vmovdqa xmm1, xmm0
- vpsrldq xmm1, xmm1, 4
- vpslldq xmm0, xmm0, 12
- vpxor xmm7, xmm7, xmm0
- vpsrld xmm2, xmm7, 1
- vpsrld xmm3, xmm7, 2
- vpsrld xmm0, xmm7, 7
- vpxor xmm2, xmm2, xmm3
- vpxor xmm2, xmm2, xmm0
- vpxor xmm2, xmm2, xmm1
- vpxor xmm2, xmm2, xmm7
- vpxor xmm6, xmm6, xmm2
- add ecx, 16
- cmp ecx, edx
- jl L_AES_GCM_encrypt_avx1_calc_aad_16_loop
- mov edx, r11d
- cmp ecx, edx
- je L_AES_GCM_encrypt_avx1_calc_aad_done
- L_AES_GCM_encrypt_avx1_calc_aad_lt16:
- sub rsp, 16
- vpxor xmm8, xmm8, xmm8
- xor ebx, ebx
- vmovdqu OWORD PTR [rsp], xmm8
- L_AES_GCM_encrypt_avx1_calc_aad_loop:
- movzx r13d, BYTE PTR [r12+rcx]
- mov BYTE PTR [rsp+rbx], r13b
- inc ecx
- inc ebx
- cmp ecx, edx
- jl L_AES_GCM_encrypt_avx1_calc_aad_loop
- vmovdqu xmm8, OWORD PTR [rsp]
- add rsp, 16
- vpshufb xmm8, xmm8, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm6, xmm6, xmm8
- ; ghash_gfmul_avx
- vpshufd xmm1, xmm6, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm6, 17
- vpclmulqdq xmm0, xmm5, xmm6, 0
- vpxor xmm1, xmm1, xmm6
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vmovdqa xmm7, xmm0
- vmovdqa xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm7, xmm7, xmm2
- vpxor xmm6, xmm6, xmm1
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm6, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm6, xmm6, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm6, xmm6, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm6, xmm6, xmm1
- vpslld xmm0, xmm7, 31
- vpslld xmm1, xmm7, 30
- vpslld xmm2, xmm7, 25
- vpxor xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm2
- vmovdqa xmm1, xmm0
- vpsrldq xmm1, xmm1, 4
- vpslldq xmm0, xmm0, 12
- vpxor xmm7, xmm7, xmm0
- vpsrld xmm2, xmm7, 1
- vpsrld xmm3, xmm7, 2
- vpsrld xmm0, xmm7, 7
- vpxor xmm2, xmm2, xmm3
- vpxor xmm2, xmm2, xmm0
- vpxor xmm2, xmm2, xmm1
- vpxor xmm2, xmm2, xmm7
- vpxor xmm6, xmm6, xmm2
- L_AES_GCM_encrypt_avx1_calc_aad_done:
- ; Calculate counter and H
- vpsrlq xmm9, xmm5, 63
- vpsllq xmm8, xmm5, 1
- vpslldq xmm9, xmm9, 8
- vpor xmm8, xmm8, xmm9
- vpshufd xmm5, xmm5, 255
- vpsrad xmm5, xmm5, 31
- vpshufb xmm4, xmm4, OWORD PTR L_avx1_aes_gcm_bswap_epi64
- vpand xmm5, xmm5, OWORD PTR L_avx1_aes_gcm_mod2_128
- vpaddd xmm4, xmm4, OWORD PTR L_avx1_aes_gcm_one
- vpxor xmm5, xmm5, xmm8
- vmovdqu OWORD PTR [rsp+128], xmm4
- xor ebx, ebx
- cmp r9d, 128
- mov r13d, r9d
- jl L_AES_GCM_encrypt_avx1_done_128
- and r13d, 4294967168
- vmovdqa xmm2, xmm6
- ; H ^ 1
- vmovdqu OWORD PTR [rsp], xmm5
- ; H ^ 2
- vpclmulqdq xmm8, xmm5, xmm5, 0
- vpclmulqdq xmm0, xmm5, xmm5, 17
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm0, xmm0, xmm14
- vmovdqu OWORD PTR [rsp+16], xmm0
- ; H ^ 3
- ; ghash_gfmul_red_avx
- vpshufd xmm9, xmm5, 78
- vpshufd xmm10, xmm0, 78
- vpclmulqdq xmm11, xmm0, xmm5, 17
- vpclmulqdq xmm8, xmm0, xmm5, 0
- vpxor xmm9, xmm9, xmm5
- vpxor xmm10, xmm10, xmm0
- vpclmulqdq xmm9, xmm9, xmm10, 0
- vpxor xmm9, xmm9, xmm8
- vpxor xmm9, xmm9, xmm11
- vpslldq xmm10, xmm9, 8
- vpsrldq xmm9, xmm9, 8
- vpxor xmm8, xmm8, xmm10
- vpxor xmm1, xmm11, xmm9
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm1, xmm1, xmm14
- vmovdqu OWORD PTR [rsp+32], xmm1
- ; H ^ 4
- vpclmulqdq xmm8, xmm0, xmm0, 0
- vpclmulqdq xmm3, xmm0, xmm0, 17
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm3, xmm3, xmm14
- vmovdqu OWORD PTR [rsp+48], xmm3
- ; H ^ 5
- ; ghash_gfmul_red_avx
- vpshufd xmm9, xmm0, 78
- vpshufd xmm10, xmm1, 78
- vpclmulqdq xmm11, xmm1, xmm0, 17
- vpclmulqdq xmm8, xmm1, xmm0, 0
- vpxor xmm9, xmm9, xmm0
- vpxor xmm10, xmm10, xmm1
- vpclmulqdq xmm9, xmm9, xmm10, 0
- vpxor xmm9, xmm9, xmm8
- vpxor xmm9, xmm9, xmm11
- vpslldq xmm10, xmm9, 8
- vpsrldq xmm9, xmm9, 8
- vpxor xmm8, xmm8, xmm10
- vpxor xmm7, xmm11, xmm9
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm7, xmm7, xmm14
- vmovdqu OWORD PTR [rsp+64], xmm7
- ; H ^ 6
- vpclmulqdq xmm8, xmm1, xmm1, 0
- vpclmulqdq xmm7, xmm1, xmm1, 17
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm7, xmm7, xmm14
- vmovdqu OWORD PTR [rsp+80], xmm7
- ; H ^ 7
- ; ghash_gfmul_red_avx
- vpshufd xmm9, xmm1, 78
- vpshufd xmm10, xmm3, 78
- vpclmulqdq xmm11, xmm3, xmm1, 17
- vpclmulqdq xmm8, xmm3, xmm1, 0
- vpxor xmm9, xmm9, xmm1
- vpxor xmm10, xmm10, xmm3
- vpclmulqdq xmm9, xmm9, xmm10, 0
- vpxor xmm9, xmm9, xmm8
- vpxor xmm9, xmm9, xmm11
- vpslldq xmm10, xmm9, 8
- vpsrldq xmm9, xmm9, 8
- vpxor xmm8, xmm8, xmm10
- vpxor xmm7, xmm11, xmm9
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm7, xmm7, xmm14
- vmovdqu OWORD PTR [rsp+96], xmm7
- ; H ^ 8
- vpclmulqdq xmm8, xmm3, xmm3, 0
- vpclmulqdq xmm7, xmm3, xmm3, 17
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm7, xmm7, xmm14
- vmovdqu OWORD PTR [rsp+112], xmm7
- ; First 128 bytes of input
- vmovdqu xmm0, OWORD PTR [rsp+128]
- vmovdqa xmm1, OWORD PTR L_avx1_aes_gcm_bswap_epi64
- vpshufb xmm8, xmm0, xmm1
- vpaddd xmm9, xmm0, OWORD PTR L_avx1_aes_gcm_one
- vpshufb xmm9, xmm9, xmm1
- vpaddd xmm10, xmm0, OWORD PTR L_avx1_aes_gcm_two
- vpshufb xmm10, xmm10, xmm1
- vpaddd xmm11, xmm0, OWORD PTR L_avx1_aes_gcm_three
- vpshufb xmm11, xmm11, xmm1
- vpaddd xmm12, xmm0, OWORD PTR L_avx1_aes_gcm_four
- vpshufb xmm12, xmm12, xmm1
- vpaddd xmm13, xmm0, OWORD PTR L_avx1_aes_gcm_five
- vpshufb xmm13, xmm13, xmm1
- vpaddd xmm14, xmm0, OWORD PTR L_avx1_aes_gcm_six
- vpshufb xmm14, xmm14, xmm1
- vpaddd xmm15, xmm0, OWORD PTR L_avx1_aes_gcm_seven
- vpshufb xmm15, xmm15, xmm1
- vpaddd xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_eight
- vmovdqa xmm7, OWORD PTR [r15]
- vmovdqu OWORD PTR [rsp+128], xmm0
- vpxor xmm8, xmm8, xmm7
- vpxor xmm9, xmm9, xmm7
- vpxor xmm10, xmm10, xmm7
- vpxor xmm11, xmm11, xmm7
- vpxor xmm12, xmm12, xmm7
- vpxor xmm13, xmm13, xmm7
- vpxor xmm14, xmm14, xmm7
- vpxor xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [r15+16]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [r15+32]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [r15+48]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [r15+64]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [r15+80]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [r15+96]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [r15+112]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [r15+128]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [r15+144]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- cmp r10d, 11
- vmovdqa xmm7, OWORD PTR [r15+160]
- jl L_AES_GCM_encrypt_avx1_aesenc_128_enc_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [r15+176]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- cmp r10d, 13
- vmovdqa xmm7, OWORD PTR [r15+192]
- jl L_AES_GCM_encrypt_avx1_aesenc_128_enc_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [r15+208]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [r15+224]
- L_AES_GCM_encrypt_avx1_aesenc_128_enc_done:
- vaesenclast xmm8, xmm8, xmm7
- vaesenclast xmm9, xmm9, xmm7
- vmovdqu xmm0, OWORD PTR [rdi]
- vmovdqu xmm1, OWORD PTR [rdi+16]
- vpxor xmm8, xmm8, xmm0
- vpxor xmm9, xmm9, xmm1
- vmovdqu OWORD PTR [rsi], xmm8
- vmovdqu OWORD PTR [rsi+16], xmm9
- vaesenclast xmm10, xmm10, xmm7
- vaesenclast xmm11, xmm11, xmm7
- vmovdqu xmm0, OWORD PTR [rdi+32]
- vmovdqu xmm1, OWORD PTR [rdi+48]
- vpxor xmm10, xmm10, xmm0
- vpxor xmm11, xmm11, xmm1
- vmovdqu OWORD PTR [rsi+32], xmm10
- vmovdqu OWORD PTR [rsi+48], xmm11
- vaesenclast xmm12, xmm12, xmm7
- vaesenclast xmm13, xmm13, xmm7
- vmovdqu xmm0, OWORD PTR [rdi+64]
- vmovdqu xmm1, OWORD PTR [rdi+80]
- vpxor xmm12, xmm12, xmm0
- vpxor xmm13, xmm13, xmm1
- vmovdqu OWORD PTR [rsi+64], xmm12
- vmovdqu OWORD PTR [rsi+80], xmm13
- vaesenclast xmm14, xmm14, xmm7
- vaesenclast xmm15, xmm15, xmm7
- vmovdqu xmm0, OWORD PTR [rdi+96]
- vmovdqu xmm1, OWORD PTR [rdi+112]
- vpxor xmm14, xmm14, xmm0
- vpxor xmm15, xmm15, xmm1
- vmovdqu OWORD PTR [rsi+96], xmm14
- vmovdqu OWORD PTR [rsi+112], xmm15
- cmp r13d, 128
- mov ebx, 128
- jle L_AES_GCM_encrypt_avx1_end_128
- ; More 128 bytes of input
- L_AES_GCM_encrypt_avx1_ghash_128:
- lea rcx, QWORD PTR [rdi+rbx]
- lea rdx, QWORD PTR [rsi+rbx]
- vmovdqu xmm0, OWORD PTR [rsp+128]
- vmovdqa xmm1, OWORD PTR L_avx1_aes_gcm_bswap_epi64
- vpshufb xmm8, xmm0, xmm1
- vpaddd xmm9, xmm0, OWORD PTR L_avx1_aes_gcm_one
- vpshufb xmm9, xmm9, xmm1
- vpaddd xmm10, xmm0, OWORD PTR L_avx1_aes_gcm_two
- vpshufb xmm10, xmm10, xmm1
- vpaddd xmm11, xmm0, OWORD PTR L_avx1_aes_gcm_three
- vpshufb xmm11, xmm11, xmm1
- vpaddd xmm12, xmm0, OWORD PTR L_avx1_aes_gcm_four
- vpshufb xmm12, xmm12, xmm1
- vpaddd xmm13, xmm0, OWORD PTR L_avx1_aes_gcm_five
- vpshufb xmm13, xmm13, xmm1
- vpaddd xmm14, xmm0, OWORD PTR L_avx1_aes_gcm_six
- vpshufb xmm14, xmm14, xmm1
- vpaddd xmm15, xmm0, OWORD PTR L_avx1_aes_gcm_seven
- vpshufb xmm15, xmm15, xmm1
- vpaddd xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_eight
- vmovdqa xmm7, OWORD PTR [r15]
- vmovdqu OWORD PTR [rsp+128], xmm0
- vpxor xmm8, xmm8, xmm7
- vpxor xmm9, xmm9, xmm7
- vpxor xmm10, xmm10, xmm7
- vpxor xmm11, xmm11, xmm7
- vpxor xmm12, xmm12, xmm7
- vpxor xmm13, xmm13, xmm7
- vpxor xmm14, xmm14, xmm7
- vpxor xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsp+112]
- vmovdqu xmm0, OWORD PTR [rdx+-128]
- vaesenc xmm8, xmm8, [r15+16]
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm0, xmm0, xmm2
- vpshufd xmm1, xmm7, 78
- vpshufd xmm5, xmm0, 78
- vpxor xmm1, xmm1, xmm7
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm3, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [r15+16]
- vaesenc xmm10, xmm10, [r15+16]
- vpclmulqdq xmm2, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [r15+16]
- vaesenc xmm12, xmm12, [r15+16]
- vpclmulqdq xmm1, xmm1, xmm5, 0
- vaesenc xmm13, xmm13, [r15+16]
- vaesenc xmm14, xmm14, [r15+16]
- vaesenc xmm15, xmm15, [r15+16]
- vpxor xmm1, xmm1, xmm2
- vpxor xmm1, xmm1, xmm3
- vmovdqu xmm7, OWORD PTR [rsp+96]
- vmovdqu xmm0, OWORD PTR [rdx+-112]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [r15+32]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [r15+32]
- vaesenc xmm10, xmm10, [r15+32]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [r15+32]
- vaesenc xmm12, xmm12, [r15+32]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [r15+32]
- vaesenc xmm14, xmm14, [r15+32]
- vaesenc xmm15, xmm15, [r15+32]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vmovdqu xmm7, OWORD PTR [rsp+80]
- vmovdqu xmm0, OWORD PTR [rdx+-96]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [r15+48]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [r15+48]
- vaesenc xmm10, xmm10, [r15+48]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [r15+48]
- vaesenc xmm12, xmm12, [r15+48]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [r15+48]
- vaesenc xmm14, xmm14, [r15+48]
- vaesenc xmm15, xmm15, [r15+48]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vmovdqu xmm7, OWORD PTR [rsp+64]
- vmovdqu xmm0, OWORD PTR [rdx+-80]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [r15+64]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [r15+64]
- vaesenc xmm10, xmm10, [r15+64]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [r15+64]
- vaesenc xmm12, xmm12, [r15+64]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [r15+64]
- vaesenc xmm14, xmm14, [r15+64]
- vaesenc xmm15, xmm15, [r15+64]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vmovdqu xmm7, OWORD PTR [rsp+48]
- vmovdqu xmm0, OWORD PTR [rdx+-64]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [r15+80]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [r15+80]
- vaesenc xmm10, xmm10, [r15+80]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [r15+80]
- vaesenc xmm12, xmm12, [r15+80]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [r15+80]
- vaesenc xmm14, xmm14, [r15+80]
- vaesenc xmm15, xmm15, [r15+80]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vmovdqu xmm7, OWORD PTR [rsp+32]
- vmovdqu xmm0, OWORD PTR [rdx+-48]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [r15+96]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [r15+96]
- vaesenc xmm10, xmm10, [r15+96]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [r15+96]
- vaesenc xmm12, xmm12, [r15+96]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [r15+96]
- vaesenc xmm14, xmm14, [r15+96]
- vaesenc xmm15, xmm15, [r15+96]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vmovdqu xmm7, OWORD PTR [rsp+16]
- vmovdqu xmm0, OWORD PTR [rdx+-32]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [r15+112]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [r15+112]
- vaesenc xmm10, xmm10, [r15+112]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [r15+112]
- vaesenc xmm12, xmm12, [r15+112]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [r15+112]
- vaesenc xmm14, xmm14, [r15+112]
- vaesenc xmm15, xmm15, [r15+112]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vmovdqu xmm7, OWORD PTR [rsp]
- vmovdqu xmm0, OWORD PTR [rdx+-16]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [r15+128]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [r15+128]
- vaesenc xmm10, xmm10, [r15+128]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [r15+128]
- vaesenc xmm12, xmm12, [r15+128]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [r15+128]
- vaesenc xmm14, xmm14, [r15+128]
- vaesenc xmm15, xmm15, [r15+128]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vpslldq xmm5, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vaesenc xmm8, xmm8, [r15+144]
- vpxor xmm2, xmm2, xmm5
- vpxor xmm3, xmm3, xmm1
- vaesenc xmm9, xmm9, [r15+144]
- vpslld xmm7, xmm2, 31
- vpslld xmm4, xmm2, 30
- vpslld xmm5, xmm2, 25
- vaesenc xmm10, xmm10, [r15+144]
- vpxor xmm7, xmm7, xmm4
- vpxor xmm7, xmm7, xmm5
- vaesenc xmm11, xmm11, [r15+144]
- vpsrldq xmm4, xmm7, 4
- vpslldq xmm7, xmm7, 12
- vaesenc xmm12, xmm12, [r15+144]
- vpxor xmm2, xmm2, xmm7
- vpsrld xmm5, xmm2, 1
- vaesenc xmm13, xmm13, [r15+144]
- vpsrld xmm1, xmm2, 2
- vpsrld xmm0, xmm2, 7
- vaesenc xmm14, xmm14, [r15+144]
- vpxor xmm5, xmm5, xmm1
- vpxor xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, [r15+144]
- vpxor xmm5, xmm5, xmm4
- vpxor xmm2, xmm2, xmm5
- vpxor xmm2, xmm2, xmm3
- cmp r10d, 11
- vmovdqa xmm7, OWORD PTR [r15+160]
- jl L_AES_GCM_encrypt_avx1_aesenc_128_ghash_avx_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [r15+176]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- cmp r10d, 13
- vmovdqa xmm7, OWORD PTR [r15+192]
- jl L_AES_GCM_encrypt_avx1_aesenc_128_ghash_avx_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [r15+208]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [r15+224]
- L_AES_GCM_encrypt_avx1_aesenc_128_ghash_avx_done:
- vaesenclast xmm8, xmm8, xmm7
- vaesenclast xmm9, xmm9, xmm7
- vmovdqu xmm0, OWORD PTR [rcx]
- vmovdqu xmm1, OWORD PTR [rcx+16]
- vpxor xmm8, xmm8, xmm0
- vpxor xmm9, xmm9, xmm1
- vmovdqu OWORD PTR [rdx], xmm8
- vmovdqu OWORD PTR [rdx+16], xmm9
- vaesenclast xmm10, xmm10, xmm7
- vaesenclast xmm11, xmm11, xmm7
- vmovdqu xmm0, OWORD PTR [rcx+32]
- vmovdqu xmm1, OWORD PTR [rcx+48]
- vpxor xmm10, xmm10, xmm0
- vpxor xmm11, xmm11, xmm1
- vmovdqu OWORD PTR [rdx+32], xmm10
- vmovdqu OWORD PTR [rdx+48], xmm11
- vaesenclast xmm12, xmm12, xmm7
- vaesenclast xmm13, xmm13, xmm7
- vmovdqu xmm0, OWORD PTR [rcx+64]
- vmovdqu xmm1, OWORD PTR [rcx+80]
- vpxor xmm12, xmm12, xmm0
- vpxor xmm13, xmm13, xmm1
- vmovdqu OWORD PTR [rdx+64], xmm12
- vmovdqu OWORD PTR [rdx+80], xmm13
- vaesenclast xmm14, xmm14, xmm7
- vaesenclast xmm15, xmm15, xmm7
- vmovdqu xmm0, OWORD PTR [rcx+96]
- vmovdqu xmm1, OWORD PTR [rcx+112]
- vpxor xmm14, xmm14, xmm0
- vpxor xmm15, xmm15, xmm1
- vmovdqu OWORD PTR [rdx+96], xmm14
- vmovdqu OWORD PTR [rdx+112], xmm15
- add ebx, 128
- cmp ebx, r13d
- jl L_AES_GCM_encrypt_avx1_ghash_128
- L_AES_GCM_encrypt_avx1_end_128:
- vmovdqa xmm4, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpshufb xmm8, xmm8, xmm4
- vpshufb xmm9, xmm9, xmm4
- vpshufb xmm10, xmm10, xmm4
- vpshufb xmm11, xmm11, xmm4
- vpxor xmm8, xmm8, xmm2
- vpshufb xmm12, xmm12, xmm4
- vpshufb xmm13, xmm13, xmm4
- vpshufb xmm14, xmm14, xmm4
- vpshufb xmm15, xmm15, xmm4
- vmovdqu xmm7, OWORD PTR [rsp]
- vmovdqu xmm5, OWORD PTR [rsp+16]
- ; ghash_gfmul_avx
- vpshufd xmm1, xmm15, 78
- vpshufd xmm2, xmm7, 78
- vpclmulqdq xmm3, xmm7, xmm15, 17
- vpclmulqdq xmm0, xmm7, xmm15, 0
- vpxor xmm1, xmm1, xmm15
- vpxor xmm2, xmm2, xmm7
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vmovdqa xmm4, xmm0
- vmovdqa xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm4, xmm4, xmm2
- vpxor xmm6, xmm6, xmm1
- ; ghash_gfmul_xor_avx
- vpshufd xmm1, xmm14, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm14, 17
- vpclmulqdq xmm0, xmm5, xmm14, 0
- vpxor xmm1, xmm1, xmm14
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vpxor xmm4, xmm4, xmm0
- vpxor xmm6, xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm4, xmm4, xmm2
- vpxor xmm6, xmm6, xmm1
- vmovdqu xmm7, OWORD PTR [rsp+32]
- vmovdqu xmm5, OWORD PTR [rsp+48]
- ; ghash_gfmul_xor_avx
- vpshufd xmm1, xmm13, 78
- vpshufd xmm2, xmm7, 78
- vpclmulqdq xmm3, xmm7, xmm13, 17
- vpclmulqdq xmm0, xmm7, xmm13, 0
- vpxor xmm1, xmm1, xmm13
- vpxor xmm2, xmm2, xmm7
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vpxor xmm4, xmm4, xmm0
- vpxor xmm6, xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm4, xmm4, xmm2
- vpxor xmm6, xmm6, xmm1
- ; ghash_gfmul_xor_avx
- vpshufd xmm1, xmm12, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm12, 17
- vpclmulqdq xmm0, xmm5, xmm12, 0
- vpxor xmm1, xmm1, xmm12
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vpxor xmm4, xmm4, xmm0
- vpxor xmm6, xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm4, xmm4, xmm2
- vpxor xmm6, xmm6, xmm1
- vmovdqu xmm7, OWORD PTR [rsp+64]
- vmovdqu xmm5, OWORD PTR [rsp+80]
- ; ghash_gfmul_xor_avx
- vpshufd xmm1, xmm11, 78
- vpshufd xmm2, xmm7, 78
- vpclmulqdq xmm3, xmm7, xmm11, 17
- vpclmulqdq xmm0, xmm7, xmm11, 0
- vpxor xmm1, xmm1, xmm11
- vpxor xmm2, xmm2, xmm7
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vpxor xmm4, xmm4, xmm0
- vpxor xmm6, xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm4, xmm4, xmm2
- vpxor xmm6, xmm6, xmm1
- ; ghash_gfmul_xor_avx
- vpshufd xmm1, xmm10, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm10, 17
- vpclmulqdq xmm0, xmm5, xmm10, 0
- vpxor xmm1, xmm1, xmm10
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vpxor xmm4, xmm4, xmm0
- vpxor xmm6, xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm4, xmm4, xmm2
- vpxor xmm6, xmm6, xmm1
- vmovdqu xmm7, OWORD PTR [rsp+96]
- vmovdqu xmm5, OWORD PTR [rsp+112]
- ; ghash_gfmul_xor_avx
- vpshufd xmm1, xmm9, 78
- vpshufd xmm2, xmm7, 78
- vpclmulqdq xmm3, xmm7, xmm9, 17
- vpclmulqdq xmm0, xmm7, xmm9, 0
- vpxor xmm1, xmm1, xmm9
- vpxor xmm2, xmm2, xmm7
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vpxor xmm4, xmm4, xmm0
- vpxor xmm6, xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm4, xmm4, xmm2
- vpxor xmm6, xmm6, xmm1
- ; ghash_gfmul_xor_avx
- vpshufd xmm1, xmm8, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm8, 17
- vpclmulqdq xmm0, xmm5, xmm8, 0
- vpxor xmm1, xmm1, xmm8
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vpxor xmm4, xmm4, xmm0
- vpxor xmm6, xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm4, xmm4, xmm2
- vpxor xmm6, xmm6, xmm1
- vpslld xmm0, xmm4, 31
- vpslld xmm1, xmm4, 30
- vpslld xmm2, xmm4, 25
- vpxor xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm2
- vmovdqa xmm1, xmm0
- vpsrldq xmm1, xmm1, 4
- vpslldq xmm0, xmm0, 12
- vpxor xmm4, xmm4, xmm0
- vpsrld xmm2, xmm4, 1
- vpsrld xmm3, xmm4, 2
- vpsrld xmm0, xmm4, 7
- vpxor xmm2, xmm2, xmm3
- vpxor xmm2, xmm2, xmm0
- vpxor xmm2, xmm2, xmm1
- vpxor xmm2, xmm2, xmm4
- vpxor xmm6, xmm6, xmm2
- vmovdqu xmm5, OWORD PTR [rsp]
- L_AES_GCM_encrypt_avx1_done_128:
- mov edx, r9d
- cmp ebx, edx
- jge L_AES_GCM_encrypt_avx1_done_enc
- mov r13d, r9d
- and r13d, 4294967280
- cmp ebx, r13d
- jge L_AES_GCM_encrypt_avx1_last_block_done
- vmovdqu xmm9, OWORD PTR [rsp+128]
- vpshufb xmm8, xmm9, OWORD PTR L_avx1_aes_gcm_bswap_epi64
- vpaddd xmm9, xmm9, OWORD PTR L_avx1_aes_gcm_one
- vmovdqu OWORD PTR [rsp+128], xmm9
- vpxor xmm8, xmm8, [r15]
- vaesenc xmm8, xmm8, [r15+16]
- vaesenc xmm8, xmm8, [r15+32]
- vaesenc xmm8, xmm8, [r15+48]
- vaesenc xmm8, xmm8, [r15+64]
- vaesenc xmm8, xmm8, [r15+80]
- vaesenc xmm8, xmm8, [r15+96]
- vaesenc xmm8, xmm8, [r15+112]
- vaesenc xmm8, xmm8, [r15+128]
- vaesenc xmm8, xmm8, [r15+144]
- cmp r10d, 11
- vmovdqa xmm9, OWORD PTR [r15+160]
- jl L_AES_GCM_encrypt_avx1_aesenc_block_last
- vaesenc xmm8, xmm8, xmm9
- vaesenc xmm8, xmm8, [r15+176]
- cmp r10d, 13
- vmovdqa xmm9, OWORD PTR [r15+192]
- jl L_AES_GCM_encrypt_avx1_aesenc_block_last
- vaesenc xmm8, xmm8, xmm9
- vaesenc xmm8, xmm8, [r15+208]
- vmovdqa xmm9, OWORD PTR [r15+224]
- L_AES_GCM_encrypt_avx1_aesenc_block_last:
- vaesenclast xmm8, xmm8, xmm9
- vmovdqu xmm9, OWORD PTR [rdi+rbx]
- vpxor xmm8, xmm8, xmm9
- vmovdqu OWORD PTR [rsi+rbx], xmm8
- vpshufb xmm8, xmm8, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm6, xmm6, xmm8
- add ebx, 16
- cmp ebx, r13d
- jge L_AES_GCM_encrypt_avx1_last_block_ghash
- L_AES_GCM_encrypt_avx1_last_block_start:
- vmovdqu xmm13, OWORD PTR [rdi+rbx]
- vmovdqu xmm9, OWORD PTR [rsp+128]
- vpshufb xmm8, xmm9, OWORD PTR L_avx1_aes_gcm_bswap_epi64
- vpaddd xmm9, xmm9, OWORD PTR L_avx1_aes_gcm_one
- vmovdqu OWORD PTR [rsp+128], xmm9
- vpxor xmm8, xmm8, [r15]
- vpclmulqdq xmm10, xmm6, xmm5, 16
- vaesenc xmm8, xmm8, [r15+16]
- vaesenc xmm8, xmm8, [r15+32]
- vpclmulqdq xmm11, xmm6, xmm5, 1
- vaesenc xmm8, xmm8, [r15+48]
- vaesenc xmm8, xmm8, [r15+64]
- vpclmulqdq xmm12, xmm6, xmm5, 0
- vaesenc xmm8, xmm8, [r15+80]
- vpclmulqdq xmm1, xmm6, xmm5, 17
- vaesenc xmm8, xmm8, [r15+96]
- vpxor xmm10, xmm10, xmm11
- vpslldq xmm2, xmm10, 8
- vpsrldq xmm10, xmm10, 8
- vaesenc xmm8, xmm8, [r15+112]
- vpxor xmm2, xmm2, xmm12
- vpxor xmm3, xmm1, xmm10
- vmovdqa xmm0, OWORD PTR L_avx1_aes_gcm_mod2_128
- vpclmulqdq xmm11, xmm2, xmm0, 16
- vaesenc xmm8, xmm8, [r15+128]
- vpshufd xmm10, xmm2, 78
- vpxor xmm10, xmm10, xmm11
- vpclmulqdq xmm11, xmm10, xmm0, 16
- vaesenc xmm8, xmm8, [r15+144]
- vpshufd xmm10, xmm10, 78
- vpxor xmm10, xmm10, xmm11
- vpxor xmm6, xmm10, xmm3
- cmp r10d, 11
- vmovdqa xmm9, OWORD PTR [r15+160]
- jl L_AES_GCM_encrypt_avx1_aesenc_gfmul_last
- vaesenc xmm8, xmm8, xmm9
- vaesenc xmm8, xmm8, [r15+176]
- cmp r10d, 13
- vmovdqa xmm9, OWORD PTR [r15+192]
- jl L_AES_GCM_encrypt_avx1_aesenc_gfmul_last
- vaesenc xmm8, xmm8, xmm9
- vaesenc xmm8, xmm8, [r15+208]
- vmovdqa xmm9, OWORD PTR [r15+224]
- L_AES_GCM_encrypt_avx1_aesenc_gfmul_last:
- vaesenclast xmm8, xmm8, xmm9
- vmovdqa xmm0, xmm13
- vpxor xmm8, xmm8, xmm0
- vmovdqu OWORD PTR [rsi+rbx], xmm8
- vpshufb xmm8, xmm8, OWORD PTR L_avx1_aes_gcm_bswap_mask
- add ebx, 16
- vpxor xmm6, xmm6, xmm8
- cmp ebx, r13d
- jl L_AES_GCM_encrypt_avx1_last_block_start
- L_AES_GCM_encrypt_avx1_last_block_ghash:
- ; ghash_gfmul_red_avx
- vpshufd xmm9, xmm5, 78
- vpshufd xmm10, xmm6, 78
- vpclmulqdq xmm11, xmm6, xmm5, 17
- vpclmulqdq xmm8, xmm6, xmm5, 0
- vpxor xmm9, xmm9, xmm5
- vpxor xmm10, xmm10, xmm6
- vpclmulqdq xmm9, xmm9, xmm10, 0
- vpxor xmm9, xmm9, xmm8
- vpxor xmm9, xmm9, xmm11
- vpslldq xmm10, xmm9, 8
- vpsrldq xmm9, xmm9, 8
- vpxor xmm8, xmm8, xmm10
- vpxor xmm6, xmm11, xmm9
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm6, xmm6, xmm14
- L_AES_GCM_encrypt_avx1_last_block_done:
- mov ecx, r9d
- mov edx, ecx
- and ecx, 15
- jz L_AES_GCM_encrypt_avx1_aesenc_last15_enc_avx_done
- vmovdqu xmm4, OWORD PTR [rsp+128]
- vpshufb xmm4, xmm4, OWORD PTR L_avx1_aes_gcm_bswap_epi64
- vpxor xmm4, xmm4, [r15]
- vaesenc xmm4, xmm4, [r15+16]
- vaesenc xmm4, xmm4, [r15+32]
- vaesenc xmm4, xmm4, [r15+48]
- vaesenc xmm4, xmm4, [r15+64]
- vaesenc xmm4, xmm4, [r15+80]
- vaesenc xmm4, xmm4, [r15+96]
- vaesenc xmm4, xmm4, [r15+112]
- vaesenc xmm4, xmm4, [r15+128]
- vaesenc xmm4, xmm4, [r15+144]
- cmp r10d, 11
- vmovdqa xmm9, OWORD PTR [r15+160]
- jl L_AES_GCM_encrypt_avx1_aesenc_last15_enc_avx_aesenc_avx_last
- vaesenc xmm4, xmm4, xmm9
- vaesenc xmm4, xmm4, [r15+176]
- cmp r10d, 13
- vmovdqa xmm9, OWORD PTR [r15+192]
- jl L_AES_GCM_encrypt_avx1_aesenc_last15_enc_avx_aesenc_avx_last
- vaesenc xmm4, xmm4, xmm9
- vaesenc xmm4, xmm4, [r15+208]
- vmovdqa xmm9, OWORD PTR [r15+224]
- L_AES_GCM_encrypt_avx1_aesenc_last15_enc_avx_aesenc_avx_last:
- vaesenclast xmm4, xmm4, xmm9
- sub rsp, 16
- xor ecx, ecx
- vmovdqu OWORD PTR [rsp], xmm4
- L_AES_GCM_encrypt_avx1_aesenc_last15_enc_avx_loop:
- movzx r13d, BYTE PTR [rdi+rbx]
- xor r13b, BYTE PTR [rsp+rcx]
- mov BYTE PTR [rsi+rbx], r13b
- mov BYTE PTR [rsp+rcx], r13b
- inc ebx
- inc ecx
- cmp ebx, edx
- jl L_AES_GCM_encrypt_avx1_aesenc_last15_enc_avx_loop
- xor r13, r13
- cmp ecx, 16
- je L_AES_GCM_encrypt_avx1_aesenc_last15_enc_avx_finish_enc
- L_AES_GCM_encrypt_avx1_aesenc_last15_enc_avx_byte_loop:
- mov BYTE PTR [rsp+rcx], r13b
- inc ecx
- cmp ecx, 16
- jl L_AES_GCM_encrypt_avx1_aesenc_last15_enc_avx_byte_loop
- L_AES_GCM_encrypt_avx1_aesenc_last15_enc_avx_finish_enc:
- vmovdqu xmm4, OWORD PTR [rsp]
- add rsp, 16
- vpshufb xmm4, xmm4, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm6, xmm6, xmm4
- ; ghash_gfmul_red_avx
- vpshufd xmm9, xmm5, 78
- vpshufd xmm10, xmm6, 78
- vpclmulqdq xmm11, xmm6, xmm5, 17
- vpclmulqdq xmm8, xmm6, xmm5, 0
- vpxor xmm9, xmm9, xmm5
- vpxor xmm10, xmm10, xmm6
- vpclmulqdq xmm9, xmm9, xmm10, 0
- vpxor xmm9, xmm9, xmm8
- vpxor xmm9, xmm9, xmm11
- vpslldq xmm10, xmm9, 8
- vpsrldq xmm9, xmm9, 8
- vpxor xmm8, xmm8, xmm10
- vpxor xmm6, xmm11, xmm9
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm6, xmm6, xmm14
- L_AES_GCM_encrypt_avx1_aesenc_last15_enc_avx_done:
- L_AES_GCM_encrypt_avx1_done_enc:
- mov edx, r9d
- mov ecx, r11d
- shl rdx, 3
- shl rcx, 3
- vmovq xmm0, rdx
- vmovq xmm1, rcx
- vpunpcklqdq xmm0, xmm0, xmm1
- vpxor xmm6, xmm6, xmm0
- ; ghash_gfmul_red_avx
- vpshufd xmm9, xmm5, 78
- vpshufd xmm10, xmm6, 78
- vpclmulqdq xmm11, xmm6, xmm5, 17
- vpclmulqdq xmm8, xmm6, xmm5, 0
- vpxor xmm9, xmm9, xmm5
- vpxor xmm10, xmm10, xmm6
- vpclmulqdq xmm9, xmm9, xmm10, 0
- vpxor xmm9, xmm9, xmm8
- vpxor xmm9, xmm9, xmm11
- vpslldq xmm10, xmm9, 8
- vpsrldq xmm9, xmm9, 8
- vpxor xmm8, xmm8, xmm10
- vpxor xmm6, xmm11, xmm9
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm6, xmm6, xmm14
- vpshufb xmm6, xmm6, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vmovdqu xmm0, OWORD PTR [rsp+144]
- vpxor xmm0, xmm0, xmm6
- cmp r14d, 16
- je L_AES_GCM_encrypt_avx1_store_tag_16
- xor rcx, rcx
- vmovdqu OWORD PTR [rsp], xmm0
- L_AES_GCM_encrypt_avx1_store_tag_loop:
- movzx r13d, BYTE PTR [rsp+rcx]
- mov BYTE PTR [r8+rcx], r13b
- inc ecx
- cmp ecx, r14d
- jne L_AES_GCM_encrypt_avx1_store_tag_loop
- jmp L_AES_GCM_encrypt_avx1_store_tag_done
- L_AES_GCM_encrypt_avx1_store_tag_16:
- vmovdqu OWORD PTR [r8], xmm0
- L_AES_GCM_encrypt_avx1_store_tag_done:
- vzeroupper
- vmovdqu xmm6, OWORD PTR [rsp+160]
- vmovdqu xmm7, OWORD PTR [rsp+176]
- vmovdqu xmm8, OWORD PTR [rsp+192]
- vmovdqu xmm9, OWORD PTR [rsp+208]
- vmovdqu xmm10, OWORD PTR [rsp+224]
- vmovdqu xmm11, OWORD PTR [rsp+240]
- vmovdqu xmm12, OWORD PTR [rsp+256]
- vmovdqu xmm13, OWORD PTR [rsp+272]
- vmovdqu xmm14, OWORD PTR [rsp+288]
- vmovdqu xmm15, OWORD PTR [rsp+304]
- add rsp, 320
- pop r15
- pop r14
- pop rbx
- pop r12
- pop rsi
- pop rdi
- pop r13
- ret
- AES_GCM_encrypt_avx1 ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_decrypt_avx1 PROC
- ; AES-GCM decryption using AVX1, AES-NI and PCLMULQDQ (Microsoft x64 ABI).
- ; Register arguments: rcx = input (ciphertext), rdx = output (plaintext),
- ; r8 = additional authenticated data, r9 = IV.  Stack arguments (offsets
- ; are relative to rsp after the 8 pushes below): [rsp+104] = auth tag,
- ; [rsp+112] = input length, [rsp+120] = AAD length, [rsp+128] = IV length,
- ; [rsp+136] = tag length, [rsp+144] = AES key schedule, [rsp+152] = number
- ; of AES rounds, [rsp+160] = pointer receiving the tag-match result
- ; (1 = tag matches, 0 = mismatch).  NOTE(review): argument mapping is
- ; inferred from the loads and uses below - confirm against the C prototype.
- ; Win64 callee-saved GPRs and xmm6-xmm15 are preserved.
- push r13
- push rdi
- push rsi
- push r12
- push rbx
- push r14
- push r15
- push rbp
- mov rdi, rcx
- mov rsi, rdx
- mov r12, r8
- mov rax, r9
- mov r8, QWORD PTR [rsp+104]
- mov r9d, DWORD PTR [rsp+112]
- mov r11d, DWORD PTR [rsp+120]
- mov ebx, DWORD PTR [rsp+128]
- mov r14d, DWORD PTR [rsp+136]
- mov r15, QWORD PTR [rsp+144]
- mov r10d, DWORD PTR [rsp+152]
- mov rbp, QWORD PTR [rsp+160]
- ; Allocate locals (H powers, counter, partial tag) and save the
- ; non-volatile xmm6-xmm15 registers as required by the Win64 ABI.
- sub rsp, 328
- vmovdqu OWORD PTR [rsp+168], xmm6
- vmovdqu OWORD PTR [rsp+184], xmm7
- vmovdqu OWORD PTR [rsp+200], xmm8
- vmovdqu OWORD PTR [rsp+216], xmm9
- vmovdqu OWORD PTR [rsp+232], xmm10
- vmovdqu OWORD PTR [rsp+248], xmm11
- vmovdqu OWORD PTR [rsp+264], xmm12
- vmovdqu OWORD PTR [rsp+280], xmm13
- vmovdqu OWORD PTR [rsp+296], xmm14
- vmovdqu OWORD PTR [rsp+312], xmm15
- ; xmm4 = GHASH state for the IV, xmm6 = GHASH state for AAD/ciphertext.
- vpxor xmm4, xmm4, xmm4
- vpxor xmm6, xmm6, xmm6
- cmp ebx, 12
- mov edx, ebx
- jne L_AES_GCM_decrypt_avx1_iv_not_12
- ; # Calculate values when IV is 12 bytes
- ; Set counter based on IV
- mov ecx, 16777216
- vmovq xmm4, QWORD PTR [rax]
- vpinsrd xmm4, xmm4, DWORD PTR [rax+8], 2
- vpinsrd xmm4, xmm4, ecx, 3
- ; H = Encrypt X(=0) and T = Encrypt counter
- vmovdqa xmm5, OWORD PTR [r15]
- vpxor xmm1, xmm4, xmm5
- vmovdqa xmm7, OWORD PTR [r15+16]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+32]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+48]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+64]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+80]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+96]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+112]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+128]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+144]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- ; r10d = number of rounds: 10 (AES-128), 12 (AES-192) or 14 (AES-256).
- cmp r10d, 11
- vmovdqa xmm7, OWORD PTR [r15+160]
- jl L_AES_GCM_decrypt_avx1_calc_iv_12_last
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+176]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- cmp r10d, 13
- vmovdqa xmm7, OWORD PTR [r15+192]
- jl L_AES_GCM_decrypt_avx1_calc_iv_12_last
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+208]
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm1, xmm1, xmm7
- vmovdqa xmm7, OWORD PTR [r15+224]
- L_AES_GCM_decrypt_avx1_calc_iv_12_last:
- vaesenclast xmm5, xmm5, xmm7
- vaesenclast xmm1, xmm1, xmm7
- ; xmm5 = H (byte-swapped for GHASH); [rsp+144] = E(K, J0) for the tag.
- vpshufb xmm5, xmm5, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vmovdqu OWORD PTR [rsp+144], xmm1
- jmp L_AES_GCM_decrypt_avx1_iv_done
- L_AES_GCM_decrypt_avx1_iv_not_12:
- ; Calculate values when IV is not 12 bytes
- ; H = Encrypt X(=0)
- vmovdqa xmm5, OWORD PTR [r15]
- vaesenc xmm5, xmm5, [r15+16]
- vaesenc xmm5, xmm5, [r15+32]
- vaesenc xmm5, xmm5, [r15+48]
- vaesenc xmm5, xmm5, [r15+64]
- vaesenc xmm5, xmm5, [r15+80]
- vaesenc xmm5, xmm5, [r15+96]
- vaesenc xmm5, xmm5, [r15+112]
- vaesenc xmm5, xmm5, [r15+128]
- vaesenc xmm5, xmm5, [r15+144]
- cmp r10d, 11
- vmovdqa xmm9, OWORD PTR [r15+160]
- jl L_AES_GCM_decrypt_avx1_calc_iv_1_aesenc_avx_last
- vaesenc xmm5, xmm5, xmm9
- vaesenc xmm5, xmm5, [r15+176]
- cmp r10d, 13
- vmovdqa xmm9, OWORD PTR [r15+192]
- jl L_AES_GCM_decrypt_avx1_calc_iv_1_aesenc_avx_last
- vaesenc xmm5, xmm5, xmm9
- vaesenc xmm5, xmm5, [r15+208]
- vmovdqa xmm9, OWORD PTR [r15+224]
- L_AES_GCM_decrypt_avx1_calc_iv_1_aesenc_avx_last:
- vaesenclast xmm5, xmm5, xmm9
- vpshufb xmm5, xmm5, OWORD PTR L_avx1_aes_gcm_bswap_mask
- ; Calc counter
- ; Initialization vector
- ; J0 = GHASH(IV padded to 128-bit blocks) per NIST SP 800-38D.
- cmp edx, 0
- mov rcx, 0
- je L_AES_GCM_decrypt_avx1_calc_iv_done
- cmp edx, 16
- jl L_AES_GCM_decrypt_avx1_calc_iv_lt16
- and edx, 4294967280
- L_AES_GCM_decrypt_avx1_calc_iv_16_loop:
- vmovdqu xmm8, OWORD PTR [rax+rcx]
- vpshufb xmm8, xmm8, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm4, xmm4, xmm8
- ; ghash_gfmul_avx
- ; Karatsuba GF(2^128) multiply of xmm4 by H (xmm5), then reduce
- ; modulo x^128 + x^7 + x^2 + x + 1; result back in xmm4.
- vpshufd xmm1, xmm4, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpxor xmm1, xmm1, xmm4
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vmovdqa xmm7, xmm0
- vmovdqa xmm4, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm7, xmm7, xmm2
- vpxor xmm4, xmm4, xmm1
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm4, xmm4, xmm1
- vpslld xmm0, xmm7, 31
- vpslld xmm1, xmm7, 30
- vpslld xmm2, xmm7, 25
- vpxor xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm2
- vmovdqa xmm1, xmm0
- vpsrldq xmm1, xmm1, 4
- vpslldq xmm0, xmm0, 12
- vpxor xmm7, xmm7, xmm0
- vpsrld xmm2, xmm7, 1
- vpsrld xmm3, xmm7, 2
- vpsrld xmm0, xmm7, 7
- vpxor xmm2, xmm2, xmm3
- vpxor xmm2, xmm2, xmm0
- vpxor xmm2, xmm2, xmm1
- vpxor xmm2, xmm2, xmm7
- vpxor xmm4, xmm4, xmm2
- add ecx, 16
- cmp ecx, edx
- jl L_AES_GCM_decrypt_avx1_calc_iv_16_loop
- mov edx, ebx
- cmp ecx, edx
- je L_AES_GCM_decrypt_avx1_calc_iv_done
- L_AES_GCM_decrypt_avx1_calc_iv_lt16:
- ; Copy the trailing (< 16 byte) IV chunk into a zeroed stack buffer
- ; so it can be processed as one full GHASH block.
- sub rsp, 16
- vpxor xmm8, xmm8, xmm8
- xor ebx, ebx
- vmovdqu OWORD PTR [rsp], xmm8
- L_AES_GCM_decrypt_avx1_calc_iv_loop:
- movzx r13d, BYTE PTR [rax+rcx]
- mov BYTE PTR [rsp+rbx], r13b
- inc ecx
- inc ebx
- cmp ecx, edx
- jl L_AES_GCM_decrypt_avx1_calc_iv_loop
- vmovdqu xmm8, OWORD PTR [rsp]
- add rsp, 16
- vpshufb xmm8, xmm8, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm4, xmm4, xmm8
- ; ghash_gfmul_avx
- vpshufd xmm1, xmm4, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpxor xmm1, xmm1, xmm4
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vmovdqa xmm7, xmm0
- vmovdqa xmm4, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm7, xmm7, xmm2
- vpxor xmm4, xmm4, xmm1
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm4, xmm4, xmm1
- vpslld xmm0, xmm7, 31
- vpslld xmm1, xmm7, 30
- vpslld xmm2, xmm7, 25
- vpxor xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm2
- vmovdqa xmm1, xmm0
- vpsrldq xmm1, xmm1, 4
- vpslldq xmm0, xmm0, 12
- vpxor xmm7, xmm7, xmm0
- vpsrld xmm2, xmm7, 1
- vpsrld xmm3, xmm7, 2
- vpsrld xmm0, xmm7, 7
- vpxor xmm2, xmm2, xmm3
- vpxor xmm2, xmm2, xmm0
- vpxor xmm2, xmm2, xmm1
- vpxor xmm2, xmm2, xmm7
- vpxor xmm4, xmm4, xmm2
- L_AES_GCM_decrypt_avx1_calc_iv_done:
- ; T = Encrypt counter
- ; Fold in len(IV) in bits, multiply by H once more, then encrypt the
- ; resulting pre-counter block J0 to form the tag mask.
- vpxor xmm0, xmm0, xmm0
- shl edx, 3
- vmovq xmm0, rdx
- vpxor xmm4, xmm4, xmm0
- ; ghash_gfmul_avx
- vpshufd xmm1, xmm4, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpxor xmm1, xmm1, xmm4
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vmovdqa xmm7, xmm0
- vmovdqa xmm4, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm7, xmm7, xmm2
- vpxor xmm4, xmm4, xmm1
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm4, xmm4, xmm1
- vpslld xmm0, xmm7, 31
- vpslld xmm1, xmm7, 30
- vpslld xmm2, xmm7, 25
- vpxor xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm2
- vmovdqa xmm1, xmm0
- vpsrldq xmm1, xmm1, 4
- vpslldq xmm0, xmm0, 12
- vpxor xmm7, xmm7, xmm0
- vpsrld xmm2, xmm7, 1
- vpsrld xmm3, xmm7, 2
- vpsrld xmm0, xmm7, 7
- vpxor xmm2, xmm2, xmm3
- vpxor xmm2, xmm2, xmm0
- vpxor xmm2, xmm2, xmm1
- vpxor xmm2, xmm2, xmm7
- vpxor xmm4, xmm4, xmm2
- vpshufb xmm4, xmm4, OWORD PTR L_avx1_aes_gcm_bswap_mask
- ; Encrypt counter
- vmovdqa xmm8, OWORD PTR [r15]
- vpxor xmm8, xmm8, xmm4
- vaesenc xmm8, xmm8, [r15+16]
- vaesenc xmm8, xmm8, [r15+32]
- vaesenc xmm8, xmm8, [r15+48]
- vaesenc xmm8, xmm8, [r15+64]
- vaesenc xmm8, xmm8, [r15+80]
- vaesenc xmm8, xmm8, [r15+96]
- vaesenc xmm8, xmm8, [r15+112]
- vaesenc xmm8, xmm8, [r15+128]
- vaesenc xmm8, xmm8, [r15+144]
- cmp r10d, 11
- vmovdqa xmm9, OWORD PTR [r15+160]
- jl L_AES_GCM_decrypt_avx1_calc_iv_2_aesenc_avx_last
- vaesenc xmm8, xmm8, xmm9
- vaesenc xmm8, xmm8, [r15+176]
- cmp r10d, 13
- vmovdqa xmm9, OWORD PTR [r15+192]
- jl L_AES_GCM_decrypt_avx1_calc_iv_2_aesenc_avx_last
- vaesenc xmm8, xmm8, xmm9
- vaesenc xmm8, xmm8, [r15+208]
- vmovdqa xmm9, OWORD PTR [r15+224]
- L_AES_GCM_decrypt_avx1_calc_iv_2_aesenc_avx_last:
- vaesenclast xmm8, xmm8, xmm9
- vmovdqu OWORD PTR [rsp+144], xmm8
- L_AES_GCM_decrypt_avx1_iv_done:
- ; Additional authentication data
- ; GHASH the AAD (r12, r11d bytes) into xmm6.
- mov edx, r11d
- cmp edx, 0
- je L_AES_GCM_decrypt_avx1_calc_aad_done
- xor ecx, ecx
- cmp edx, 16
- jl L_AES_GCM_decrypt_avx1_calc_aad_lt16
- and edx, 4294967280
- L_AES_GCM_decrypt_avx1_calc_aad_16_loop:
- vmovdqu xmm8, OWORD PTR [r12+rcx]
- vpshufb xmm8, xmm8, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm6, xmm6, xmm8
- ; ghash_gfmul_avx
- vpshufd xmm1, xmm6, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm6, 17
- vpclmulqdq xmm0, xmm5, xmm6, 0
- vpxor xmm1, xmm1, xmm6
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vmovdqa xmm7, xmm0
- vmovdqa xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm7, xmm7, xmm2
- vpxor xmm6, xmm6, xmm1
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm6, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm6, xmm6, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm6, xmm6, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm6, xmm6, xmm1
- vpslld xmm0, xmm7, 31
- vpslld xmm1, xmm7, 30
- vpslld xmm2, xmm7, 25
- vpxor xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm2
- vmovdqa xmm1, xmm0
- vpsrldq xmm1, xmm1, 4
- vpslldq xmm0, xmm0, 12
- vpxor xmm7, xmm7, xmm0
- vpsrld xmm2, xmm7, 1
- vpsrld xmm3, xmm7, 2
- vpsrld xmm0, xmm7, 7
- vpxor xmm2, xmm2, xmm3
- vpxor xmm2, xmm2, xmm0
- vpxor xmm2, xmm2, xmm1
- vpxor xmm2, xmm2, xmm7
- vpxor xmm6, xmm6, xmm2
- add ecx, 16
- cmp ecx, edx
- jl L_AES_GCM_decrypt_avx1_calc_aad_16_loop
- mov edx, r11d
- cmp ecx, edx
- je L_AES_GCM_decrypt_avx1_calc_aad_done
- L_AES_GCM_decrypt_avx1_calc_aad_lt16:
- ; Zero-pad the trailing (< 16 byte) AAD chunk on the stack.
- sub rsp, 16
- vpxor xmm8, xmm8, xmm8
- xor ebx, ebx
- vmovdqu OWORD PTR [rsp], xmm8
- L_AES_GCM_decrypt_avx1_calc_aad_loop:
- movzx r13d, BYTE PTR [r12+rcx]
- mov BYTE PTR [rsp+rbx], r13b
- inc ecx
- inc ebx
- cmp ecx, edx
- jl L_AES_GCM_decrypt_avx1_calc_aad_loop
- vmovdqu xmm8, OWORD PTR [rsp]
- add rsp, 16
- vpshufb xmm8, xmm8, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm6, xmm6, xmm8
- ; ghash_gfmul_avx
- vpshufd xmm1, xmm6, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm6, 17
- vpclmulqdq xmm0, xmm5, xmm6, 0
- vpxor xmm1, xmm1, xmm6
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vmovdqa xmm7, xmm0
- vmovdqa xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm7, xmm7, xmm2
- vpxor xmm6, xmm6, xmm1
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm6, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm6, xmm6, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm6, xmm6, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm6, xmm6, xmm1
- vpslld xmm0, xmm7, 31
- vpslld xmm1, xmm7, 30
- vpslld xmm2, xmm7, 25
- vpxor xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm2
- vmovdqa xmm1, xmm0
- vpsrldq xmm1, xmm1, 4
- vpslldq xmm0, xmm0, 12
- vpxor xmm7, xmm7, xmm0
- vpsrld xmm2, xmm7, 1
- vpsrld xmm3, xmm7, 2
- vpsrld xmm0, xmm7, 7
- vpxor xmm2, xmm2, xmm3
- vpxor xmm2, xmm2, xmm0
- vpxor xmm2, xmm2, xmm1
- vpxor xmm2, xmm2, xmm7
- vpxor xmm6, xmm6, xmm2
- L_AES_GCM_decrypt_avx1_calc_aad_done:
- ; Calculate counter and H
- ; Convert H into the bit-reflected form used by the main loop
- ; (multiply by x with reduction), and advance the counter past J0.
- vpsrlq xmm9, xmm5, 63
- vpsllq xmm8, xmm5, 1
- vpslldq xmm9, xmm9, 8
- vpor xmm8, xmm8, xmm9
- vpshufd xmm5, xmm5, 255
- vpsrad xmm5, xmm5, 31
- vpshufb xmm4, xmm4, OWORD PTR L_avx1_aes_gcm_bswap_epi64
- vpand xmm5, xmm5, OWORD PTR L_avx1_aes_gcm_mod2_128
- vpaddd xmm4, xmm4, OWORD PTR L_avx1_aes_gcm_one
- vpxor xmm5, xmm5, xmm8
- vmovdqu OWORD PTR [rsp+128], xmm4
- ; If at least 128 bytes of input, precompute H^1..H^8 on the stack
- ; ([rsp]..[rsp+112]) and use the 8-blocks-at-a-time loop below.
- xor ebx, ebx
- cmp r9d, 128
- mov r13d, r9d
- jl L_AES_GCM_decrypt_avx1_done_128
- and r13d, 4294967168
- vmovdqa xmm2, xmm6
- ; H ^ 1
- vmovdqu OWORD PTR [rsp], xmm5
- ; H ^ 2
- vpclmulqdq xmm8, xmm5, xmm5, 0
- vpclmulqdq xmm0, xmm5, xmm5, 17
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm0, xmm0, xmm14
- vmovdqu OWORD PTR [rsp+16], xmm0
- ; H ^ 3
- ; ghash_gfmul_red_avx
- vpshufd xmm9, xmm5, 78
- vpshufd xmm10, xmm0, 78
- vpclmulqdq xmm11, xmm0, xmm5, 17
- vpclmulqdq xmm8, xmm0, xmm5, 0
- vpxor xmm9, xmm9, xmm5
- vpxor xmm10, xmm10, xmm0
- vpclmulqdq xmm9, xmm9, xmm10, 0
- vpxor xmm9, xmm9, xmm8
- vpxor xmm9, xmm9, xmm11
- vpslldq xmm10, xmm9, 8
- vpsrldq xmm9, xmm9, 8
- vpxor xmm8, xmm8, xmm10
- vpxor xmm1, xmm11, xmm9
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm1, xmm1, xmm14
- vmovdqu OWORD PTR [rsp+32], xmm1
- ; H ^ 4
- vpclmulqdq xmm8, xmm0, xmm0, 0
- vpclmulqdq xmm3, xmm0, xmm0, 17
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm3, xmm3, xmm14
- vmovdqu OWORD PTR [rsp+48], xmm3
- ; H ^ 5
- ; ghash_gfmul_red_avx
- vpshufd xmm9, xmm0, 78
- vpshufd xmm10, xmm1, 78
- vpclmulqdq xmm11, xmm1, xmm0, 17
- vpclmulqdq xmm8, xmm1, xmm0, 0
- vpxor xmm9, xmm9, xmm0
- vpxor xmm10, xmm10, xmm1
- vpclmulqdq xmm9, xmm9, xmm10, 0
- vpxor xmm9, xmm9, xmm8
- vpxor xmm9, xmm9, xmm11
- vpslldq xmm10, xmm9, 8
- vpsrldq xmm9, xmm9, 8
- vpxor xmm8, xmm8, xmm10
- vpxor xmm7, xmm11, xmm9
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm7, xmm7, xmm14
- vmovdqu OWORD PTR [rsp+64], xmm7
- ; H ^ 6
- vpclmulqdq xmm8, xmm1, xmm1, 0
- vpclmulqdq xmm7, xmm1, xmm1, 17
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm7, xmm7, xmm14
- vmovdqu OWORD PTR [rsp+80], xmm7
- ; H ^ 7
- ; ghash_gfmul_red_avx
- vpshufd xmm9, xmm1, 78
- vpshufd xmm10, xmm3, 78
- vpclmulqdq xmm11, xmm3, xmm1, 17
- vpclmulqdq xmm8, xmm3, xmm1, 0
- vpxor xmm9, xmm9, xmm1
- vpxor xmm10, xmm10, xmm3
- vpclmulqdq xmm9, xmm9, xmm10, 0
- vpxor xmm9, xmm9, xmm8
- vpxor xmm9, xmm9, xmm11
- vpslldq xmm10, xmm9, 8
- vpsrldq xmm9, xmm9, 8
- vpxor xmm8, xmm8, xmm10
- vpxor xmm7, xmm11, xmm9
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm7, xmm7, xmm14
- vmovdqu OWORD PTR [rsp+96], xmm7
- ; H ^ 8
- vpclmulqdq xmm8, xmm3, xmm3, 0
- vpclmulqdq xmm7, xmm3, xmm3, 17
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm7, xmm7, xmm14
- vmovdqu OWORD PTR [rsp+112], xmm7
- ; Main loop: each iteration AES-CTR decrypts 8 blocks (xmm8-xmm15)
- ; while GHASH-ing the 8 ciphertext blocks against H^8..H^1, with the
- ; aesenc and pclmulqdq work interleaved for throughput.
- L_AES_GCM_decrypt_avx1_ghash_128:
- lea rcx, QWORD PTR [rdi+rbx]
- lea rdx, QWORD PTR [rsi+rbx]
- vmovdqu xmm0, OWORD PTR [rsp+128]
- vmovdqa xmm1, OWORD PTR L_avx1_aes_gcm_bswap_epi64
- vpshufb xmm8, xmm0, xmm1
- vpaddd xmm9, xmm0, OWORD PTR L_avx1_aes_gcm_one
- vpshufb xmm9, xmm9, xmm1
- vpaddd xmm10, xmm0, OWORD PTR L_avx1_aes_gcm_two
- vpshufb xmm10, xmm10, xmm1
- vpaddd xmm11, xmm0, OWORD PTR L_avx1_aes_gcm_three
- vpshufb xmm11, xmm11, xmm1
- vpaddd xmm12, xmm0, OWORD PTR L_avx1_aes_gcm_four
- vpshufb xmm12, xmm12, xmm1
- vpaddd xmm13, xmm0, OWORD PTR L_avx1_aes_gcm_five
- vpshufb xmm13, xmm13, xmm1
- vpaddd xmm14, xmm0, OWORD PTR L_avx1_aes_gcm_six
- vpshufb xmm14, xmm14, xmm1
- vpaddd xmm15, xmm0, OWORD PTR L_avx1_aes_gcm_seven
- vpshufb xmm15, xmm15, xmm1
- vpaddd xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_eight
- vmovdqa xmm7, OWORD PTR [r15]
- vmovdqu OWORD PTR [rsp+128], xmm0
- vpxor xmm8, xmm8, xmm7
- vpxor xmm9, xmm9, xmm7
- vpxor xmm10, xmm10, xmm7
- vpxor xmm11, xmm11, xmm7
- vpxor xmm12, xmm12, xmm7
- vpxor xmm13, xmm13, xmm7
- vpxor xmm14, xmm14, xmm7
- vpxor xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsp+112]
- vmovdqu xmm0, OWORD PTR [rcx]
- vaesenc xmm8, xmm8, [r15+16]
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm0, xmm0, xmm2
- vpshufd xmm1, xmm7, 78
- vpshufd xmm5, xmm0, 78
- vpxor xmm1, xmm1, xmm7
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm3, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [r15+16]
- vaesenc xmm10, xmm10, [r15+16]
- vpclmulqdq xmm2, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [r15+16]
- vaesenc xmm12, xmm12, [r15+16]
- vpclmulqdq xmm1, xmm1, xmm5, 0
- vaesenc xmm13, xmm13, [r15+16]
- vaesenc xmm14, xmm14, [r15+16]
- vaesenc xmm15, xmm15, [r15+16]
- vpxor xmm1, xmm1, xmm2
- vpxor xmm1, xmm1, xmm3
- vmovdqu xmm7, OWORD PTR [rsp+96]
- vmovdqu xmm0, OWORD PTR [rcx+16]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [r15+32]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [r15+32]
- vaesenc xmm10, xmm10, [r15+32]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [r15+32]
- vaesenc xmm12, xmm12, [r15+32]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [r15+32]
- vaesenc xmm14, xmm14, [r15+32]
- vaesenc xmm15, xmm15, [r15+32]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vmovdqu xmm7, OWORD PTR [rsp+80]
- vmovdqu xmm0, OWORD PTR [rcx+32]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [r15+48]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [r15+48]
- vaesenc xmm10, xmm10, [r15+48]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [r15+48]
- vaesenc xmm12, xmm12, [r15+48]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [r15+48]
- vaesenc xmm14, xmm14, [r15+48]
- vaesenc xmm15, xmm15, [r15+48]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vmovdqu xmm7, OWORD PTR [rsp+64]
- vmovdqu xmm0, OWORD PTR [rcx+48]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [r15+64]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [r15+64]
- vaesenc xmm10, xmm10, [r15+64]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [r15+64]
- vaesenc xmm12, xmm12, [r15+64]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [r15+64]
- vaesenc xmm14, xmm14, [r15+64]
- vaesenc xmm15, xmm15, [r15+64]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vmovdqu xmm7, OWORD PTR [rsp+48]
- vmovdqu xmm0, OWORD PTR [rcx+64]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [r15+80]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [r15+80]
- vaesenc xmm10, xmm10, [r15+80]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [r15+80]
- vaesenc xmm12, xmm12, [r15+80]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [r15+80]
- vaesenc xmm14, xmm14, [r15+80]
- vaesenc xmm15, xmm15, [r15+80]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vmovdqu xmm7, OWORD PTR [rsp+32]
- vmovdqu xmm0, OWORD PTR [rcx+80]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [r15+96]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [r15+96]
- vaesenc xmm10, xmm10, [r15+96]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [r15+96]
- vaesenc xmm12, xmm12, [r15+96]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [r15+96]
- vaesenc xmm14, xmm14, [r15+96]
- vaesenc xmm15, xmm15, [r15+96]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vmovdqu xmm7, OWORD PTR [rsp+16]
- vmovdqu xmm0, OWORD PTR [rcx+96]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [r15+112]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [r15+112]
- vaesenc xmm10, xmm10, [r15+112]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [r15+112]
- vaesenc xmm12, xmm12, [r15+112]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [r15+112]
- vaesenc xmm14, xmm14, [r15+112]
- vaesenc xmm15, xmm15, [r15+112]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vmovdqu xmm7, OWORD PTR [rsp]
- vmovdqu xmm0, OWORD PTR [rcx+112]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [r15+128]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [r15+128]
- vaesenc xmm10, xmm10, [r15+128]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [r15+128]
- vaesenc xmm12, xmm12, [r15+128]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [r15+128]
- vaesenc xmm14, xmm14, [r15+128]
- vaesenc xmm15, xmm15, [r15+128]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- ; Final reduction of the accumulated 256-bit GHASH product into
- ; xmm2, interleaved with the round-9 aesenc of all 8 blocks.
- vpslldq xmm5, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vaesenc xmm8, xmm8, [r15+144]
- vpxor xmm2, xmm2, xmm5
- vpxor xmm3, xmm3, xmm1
- vaesenc xmm9, xmm9, [r15+144]
- vpslld xmm7, xmm2, 31
- vpslld xmm4, xmm2, 30
- vpslld xmm5, xmm2, 25
- vaesenc xmm10, xmm10, [r15+144]
- vpxor xmm7, xmm7, xmm4
- vpxor xmm7, xmm7, xmm5
- vaesenc xmm11, xmm11, [r15+144]
- vpsrldq xmm4, xmm7, 4
- vpslldq xmm7, xmm7, 12
- vaesenc xmm12, xmm12, [r15+144]
- vpxor xmm2, xmm2, xmm7
- vpsrld xmm5, xmm2, 1
- vaesenc xmm13, xmm13, [r15+144]
- vpsrld xmm1, xmm2, 2
- vpsrld xmm0, xmm2, 7
- vaesenc xmm14, xmm14, [r15+144]
- vpxor xmm5, xmm5, xmm1
- vpxor xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, [r15+144]
- vpxor xmm5, xmm5, xmm4
- vpxor xmm2, xmm2, xmm5
- vpxor xmm2, xmm2, xmm3
- cmp r10d, 11
- vmovdqa xmm7, OWORD PTR [r15+160]
- jl L_AES_GCM_decrypt_avx1_aesenc_128_ghash_avx_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [r15+176]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- cmp r10d, 13
- vmovdqa xmm7, OWORD PTR [r15+192]
- jl L_AES_GCM_decrypt_avx1_aesenc_128_ghash_avx_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [r15+208]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [r15+224]
- L_AES_GCM_decrypt_avx1_aesenc_128_ghash_avx_done:
- ; XOR the 8 keystream blocks with the ciphertext and store plaintext.
- vaesenclast xmm8, xmm8, xmm7
- vaesenclast xmm9, xmm9, xmm7
- vmovdqu xmm0, OWORD PTR [rcx]
- vmovdqu xmm1, OWORD PTR [rcx+16]
- vpxor xmm8, xmm8, xmm0
- vpxor xmm9, xmm9, xmm1
- vmovdqu OWORD PTR [rdx], xmm8
- vmovdqu OWORD PTR [rdx+16], xmm9
- vaesenclast xmm10, xmm10, xmm7
- vaesenclast xmm11, xmm11, xmm7
- vmovdqu xmm0, OWORD PTR [rcx+32]
- vmovdqu xmm1, OWORD PTR [rcx+48]
- vpxor xmm10, xmm10, xmm0
- vpxor xmm11, xmm11, xmm1
- vmovdqu OWORD PTR [rdx+32], xmm10
- vmovdqu OWORD PTR [rdx+48], xmm11
- vaesenclast xmm12, xmm12, xmm7
- vaesenclast xmm13, xmm13, xmm7
- vmovdqu xmm0, OWORD PTR [rcx+64]
- vmovdqu xmm1, OWORD PTR [rcx+80]
- vpxor xmm12, xmm12, xmm0
- vpxor xmm13, xmm13, xmm1
- vmovdqu OWORD PTR [rdx+64], xmm12
- vmovdqu OWORD PTR [rdx+80], xmm13
- vaesenclast xmm14, xmm14, xmm7
- vaesenclast xmm15, xmm15, xmm7
- vmovdqu xmm0, OWORD PTR [rcx+96]
- vmovdqu xmm1, OWORD PTR [rcx+112]
- vpxor xmm14, xmm14, xmm0
- vpxor xmm15, xmm15, xmm1
- vmovdqu OWORD PTR [rdx+96], xmm14
- vmovdqu OWORD PTR [rdx+112], xmm15
- add ebx, 128
- cmp ebx, r13d
- jl L_AES_GCM_decrypt_avx1_ghash_128
- vmovdqa xmm6, xmm2
- vmovdqu xmm5, OWORD PTR [rsp]
- L_AES_GCM_decrypt_avx1_done_128:
- mov edx, r9d
- cmp ebx, edx
- jge L_AES_GCM_decrypt_avx1_done_dec
- mov r13d, r9d
- and r13d, 4294967280
- cmp ebx, r13d
- jge L_AES_GCM_decrypt_avx1_last_block_done
- ; Decrypt remaining full 16-byte blocks one at a time; the GHASH of
- ; the ciphertext block is interleaved with its CTR encryption.
- L_AES_GCM_decrypt_avx1_last_block_start:
- vmovdqu xmm13, OWORD PTR [rdi+rbx]
- vmovdqa xmm0, xmm5
- vpshufb xmm1, xmm13, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm1, xmm1, xmm6
- vmovdqu xmm9, OWORD PTR [rsp+128]
- vpshufb xmm8, xmm9, OWORD PTR L_avx1_aes_gcm_bswap_epi64
- vpaddd xmm9, xmm9, OWORD PTR L_avx1_aes_gcm_one
- vmovdqu OWORD PTR [rsp+128], xmm9
- vpxor xmm8, xmm8, [r15]
- vpclmulqdq xmm10, xmm1, xmm0, 16
- vaesenc xmm8, xmm8, [r15+16]
- vaesenc xmm8, xmm8, [r15+32]
- vpclmulqdq xmm11, xmm1, xmm0, 1
- vaesenc xmm8, xmm8, [r15+48]
- vaesenc xmm8, xmm8, [r15+64]
- vpclmulqdq xmm12, xmm1, xmm0, 0
- vaesenc xmm8, xmm8, [r15+80]
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vaesenc xmm8, xmm8, [r15+96]
- vpxor xmm10, xmm10, xmm11
- vpslldq xmm2, xmm10, 8
- vpsrldq xmm10, xmm10, 8
- vaesenc xmm8, xmm8, [r15+112]
- vpxor xmm2, xmm2, xmm12
- vpxor xmm3, xmm1, xmm10
- vmovdqa xmm0, OWORD PTR L_avx1_aes_gcm_mod2_128
- vpclmulqdq xmm11, xmm2, xmm0, 16
- vaesenc xmm8, xmm8, [r15+128]
- vpshufd xmm10, xmm2, 78
- vpxor xmm10, xmm10, xmm11
- vpclmulqdq xmm11, xmm10, xmm0, 16
- vaesenc xmm8, xmm8, [r15+144]
- vpshufd xmm10, xmm10, 78
- vpxor xmm10, xmm10, xmm11
- vpxor xmm6, xmm10, xmm3
- cmp r10d, 11
- vmovdqa xmm9, OWORD PTR [r15+160]
- jl L_AES_GCM_decrypt_avx1_aesenc_gfmul_last
- vaesenc xmm8, xmm8, xmm9
- vaesenc xmm8, xmm8, [r15+176]
- cmp r10d, 13
- vmovdqa xmm9, OWORD PTR [r15+192]
- jl L_AES_GCM_decrypt_avx1_aesenc_gfmul_last
- vaesenc xmm8, xmm8, xmm9
- vaesenc xmm8, xmm8, [r15+208]
- vmovdqa xmm9, OWORD PTR [r15+224]
- L_AES_GCM_decrypt_avx1_aesenc_gfmul_last:
- vaesenclast xmm8, xmm8, xmm9
- vmovdqa xmm0, xmm13
- vpxor xmm8, xmm8, xmm0
- vmovdqu OWORD PTR [rsi+rbx], xmm8
- add ebx, 16
- cmp ebx, r13d
- jl L_AES_GCM_decrypt_avx1_last_block_start
- L_AES_GCM_decrypt_avx1_last_block_done:
- ; Handle a trailing partial block (input length not a multiple of 16):
- ; encrypt one more counter block, XOR byte-by-byte, then GHASH the
- ; zero-padded ciphertext bytes.
- mov ecx, r9d
- mov edx, ecx
- and ecx, 15
- jz L_AES_GCM_decrypt_avx1_aesenc_last15_dec_avx_done
- vmovdqu xmm4, OWORD PTR [rsp+128]
- vpshufb xmm4, xmm4, OWORD PTR L_avx1_aes_gcm_bswap_epi64
- vpxor xmm4, xmm4, [r15]
- vaesenc xmm4, xmm4, [r15+16]
- vaesenc xmm4, xmm4, [r15+32]
- vaesenc xmm4, xmm4, [r15+48]
- vaesenc xmm4, xmm4, [r15+64]
- vaesenc xmm4, xmm4, [r15+80]
- vaesenc xmm4, xmm4, [r15+96]
- vaesenc xmm4, xmm4, [r15+112]
- vaesenc xmm4, xmm4, [r15+128]
- vaesenc xmm4, xmm4, [r15+144]
- cmp r10d, 11
- vmovdqa xmm9, OWORD PTR [r15+160]
- jl L_AES_GCM_decrypt_avx1_aesenc_last15_dec_avx_aesenc_avx_last
- vaesenc xmm4, xmm4, xmm9
- vaesenc xmm4, xmm4, [r15+176]
- cmp r10d, 13
- vmovdqa xmm9, OWORD PTR [r15+192]
- jl L_AES_GCM_decrypt_avx1_aesenc_last15_dec_avx_aesenc_avx_last
- vaesenc xmm4, xmm4, xmm9
- vaesenc xmm4, xmm4, [r15+208]
- vmovdqa xmm9, OWORD PTR [r15+224]
- L_AES_GCM_decrypt_avx1_aesenc_last15_dec_avx_aesenc_avx_last:
- vaesenclast xmm4, xmm4, xmm9
- sub rsp, 32
- xor ecx, ecx
- vmovdqu OWORD PTR [rsp], xmm4
- vpxor xmm0, xmm0, xmm0
- vmovdqu OWORD PTR [rsp+16], xmm0
- L_AES_GCM_decrypt_avx1_aesenc_last15_dec_avx_loop:
- movzx r13d, BYTE PTR [rdi+rbx]
- mov BYTE PTR [rsp+rcx+16], r13b
- xor r13b, BYTE PTR [rsp+rcx]
- mov BYTE PTR [rsi+rbx], r13b
- inc ebx
- inc ecx
- cmp ebx, edx
- jl L_AES_GCM_decrypt_avx1_aesenc_last15_dec_avx_loop
- vmovdqu xmm4, OWORD PTR [rsp+16]
- add rsp, 32
- vpshufb xmm4, xmm4, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm6, xmm6, xmm4
- ; ghash_gfmul_red_avx
- vpshufd xmm9, xmm5, 78
- vpshufd xmm10, xmm6, 78
- vpclmulqdq xmm11, xmm6, xmm5, 17
- vpclmulqdq xmm8, xmm6, xmm5, 0
- vpxor xmm9, xmm9, xmm5
- vpxor xmm10, xmm10, xmm6
- vpclmulqdq xmm9, xmm9, xmm10, 0
- vpxor xmm9, xmm9, xmm8
- vpxor xmm9, xmm9, xmm11
- vpslldq xmm10, xmm9, 8
- vpsrldq xmm9, xmm9, 8
- vpxor xmm8, xmm8, xmm10
- vpxor xmm6, xmm11, xmm9
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm6, xmm6, xmm14
- L_AES_GCM_decrypt_avx1_aesenc_last15_dec_avx_done:
- L_AES_GCM_decrypt_avx1_done_dec:
- ; Finalise GHASH over the bit lengths of the AAD and ciphertext,
- ; XOR with E(K, J0) to form the expected tag, then compare against
- ; the caller-supplied tag.
- mov edx, r9d
- mov ecx, r11d
- shl rdx, 3
- shl rcx, 3
- vmovq xmm0, rdx
- vmovq xmm1, rcx
- vpunpcklqdq xmm0, xmm0, xmm1
- vpxor xmm6, xmm6, xmm0
- ; ghash_gfmul_red_avx
- vpshufd xmm9, xmm5, 78
- vpshufd xmm10, xmm6, 78
- vpclmulqdq xmm11, xmm6, xmm5, 17
- vpclmulqdq xmm8, xmm6, xmm5, 0
- vpxor xmm9, xmm9, xmm5
- vpxor xmm10, xmm10, xmm6
- vpclmulqdq xmm9, xmm9, xmm10, 0
- vpxor xmm9, xmm9, xmm8
- vpxor xmm9, xmm9, xmm11
- vpslldq xmm10, xmm9, 8
- vpsrldq xmm9, xmm9, 8
- vpxor xmm8, xmm8, xmm10
- vpxor xmm6, xmm11, xmm9
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm6, xmm6, xmm14
- vpshufb xmm6, xmm6, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vmovdqu xmm0, OWORD PTR [rsp+144]
- vpxor xmm0, xmm0, xmm6
- cmp r14d, 16
- je L_AES_GCM_decrypt_avx1_cmp_tag_16
- ; Partial-length tag: byte-wise compare accumulating the OR of the
- ; XOR differences so the comparison does not early-exit.
- sub rsp, 16
- xor rcx, rcx
- xor rbx, rbx
- vmovdqu OWORD PTR [rsp], xmm0
- L_AES_GCM_decrypt_avx1_cmp_tag_loop:
- movzx r13d, BYTE PTR [rsp+rcx]
- xor r13b, BYTE PTR [r8+rcx]
- or bl, r13b
- inc ecx
- cmp ecx, r14d
- jne L_AES_GCM_decrypt_avx1_cmp_tag_loop
- cmp rbx, 0
- sete bl
- add rsp, 16
- xor rcx, rcx
- jmp L_AES_GCM_decrypt_avx1_cmp_tag_done
- L_AES_GCM_decrypt_avx1_cmp_tag_16:
- vmovdqu xmm1, OWORD PTR [r8]
- vpcmpeqb xmm0, xmm0, xmm1
- vpmovmskb rdx, xmm0
- ; %%edx == 0xFFFF then return 1 else => return 0
- xor ebx, ebx
- cmp edx, 65535
- sete bl
- L_AES_GCM_decrypt_avx1_cmp_tag_done:
- ; Store the match result, clear the AVX upper state, restore the
- ; saved xmm registers and callee-saved GPRs, and return.
- mov DWORD PTR [rbp], ebx
- vzeroupper
- vmovdqu xmm6, OWORD PTR [rsp+168]
- vmovdqu xmm7, OWORD PTR [rsp+184]
- vmovdqu xmm8, OWORD PTR [rsp+200]
- vmovdqu xmm9, OWORD PTR [rsp+216]
- vmovdqu xmm10, OWORD PTR [rsp+232]
- vmovdqu xmm11, OWORD PTR [rsp+248]
- vmovdqu xmm12, OWORD PTR [rsp+264]
- vmovdqu xmm13, OWORD PTR [rsp+280]
- vmovdqu xmm14, OWORD PTR [rsp+296]
- vmovdqu xmm15, OWORD PTR [rsp+312]
- add rsp, 328
- pop rbp
- pop r15
- pop r14
- pop rbx
- pop r12
- pop rsi
- pop rdi
- pop r13
- ret
- AES_GCM_decrypt_avx1 ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- ; AES_GCM_init_avx1 -- derive the GCM hash key H and the initial counter
- ; material from an AES key schedule and an IV (AVX1 + AES-NI + PCLMULQDQ).
- ; ABI: Windows x64 (MASM).
- ; In:  rcx = AES round-key schedule (16-byte round keys at [rcx+0..224])
- ;      edx = AES round count (compared against 11 / 13 to pick 10/12/14 rounds)
- ;      r8  = IV pointer
- ;      r9d = IV length in bytes
- ;      stack arg 5 (read as [rsp+72] after the four pushes) = output pointer
- ;      stack arg 6 ([rsp+80]) = output pointer, stack arg 7 ([rsp+88]) = output
- ;      pointer.  NOTE(review): from the stores at the end, [rsp+72] receives H,
- ;      [rsp+80] the incremented counter block, and [rsp+88] E(K, counter0);
- ;      confirm against the C prototype.
- ; Saves/restores the callee-saved xmm6-xmm8 and xmm15 it uses (Win64 ABI).
- AES_GCM_init_avx1 PROC
- push rdi
- push rsi
- push r12
- push r13
- ; Move register args into scratch regs so rcx/rdx/r8/r9 can be reused.
- mov rdi, rcx
- mov rsi, rdx
- mov r10, r8
- mov r11d, r9d
- mov rax, QWORD PTR [rsp+72]
- mov r8, QWORD PTR [rsp+80]
- mov r9, QWORD PTR [rsp+88]
- sub rsp, 80
- vmovdqu OWORD PTR [rsp+16], xmm6
- vmovdqu OWORD PTR [rsp+32], xmm7
- vmovdqu OWORD PTR [rsp+48], xmm8
- vmovdqu OWORD PTR [rsp+64], xmm15
- vpxor xmm4, xmm4, xmm4
- mov edx, r11d
- cmp edx, 12
- jne L_AES_GCM_init_avx1_iv_not_12
- ; # Calculate values when IV is 12 bytes
- ; Set counter based on IV
- ; 16777216 = 0x01000000: the 32-bit value 1 in big-endian byte order,
- ; placed in the top dword so the counter block is IV || 0x00000001.
- mov ecx, 16777216
- vmovq xmm4, QWORD PTR [r10]
- vpinsrd xmm4, xmm4, DWORD PTR [r10+8], 2
- vpinsrd xmm4, xmm4, ecx, 3
- ; H = Encrypt X(=0) and T = Encrypt counter
- ; xmm5 encrypts the all-zero block (becomes H); xmm1 encrypts the
- ; counter block (becomes T).  Both run through the rounds in lockstep.
- vmovdqa xmm5, OWORD PTR [rdi]
- vpxor xmm1, xmm4, xmm5
- vmovdqa xmm6, OWORD PTR [rdi+16]
- vaesenc xmm5, xmm5, xmm6
- vaesenc xmm1, xmm1, xmm6
- vmovdqa xmm6, OWORD PTR [rdi+32]
- vaesenc xmm5, xmm5, xmm6
- vaesenc xmm1, xmm1, xmm6
- vmovdqa xmm6, OWORD PTR [rdi+48]
- vaesenc xmm5, xmm5, xmm6
- vaesenc xmm1, xmm1, xmm6
- vmovdqa xmm6, OWORD PTR [rdi+64]
- vaesenc xmm5, xmm5, xmm6
- vaesenc xmm1, xmm1, xmm6
- vmovdqa xmm6, OWORD PTR [rdi+80]
- vaesenc xmm5, xmm5, xmm6
- vaesenc xmm1, xmm1, xmm6
- vmovdqa xmm6, OWORD PTR [rdi+96]
- vaesenc xmm5, xmm5, xmm6
- vaesenc xmm1, xmm1, xmm6
- vmovdqa xmm6, OWORD PTR [rdi+112]
- vaesenc xmm5, xmm5, xmm6
- vaesenc xmm1, xmm1, xmm6
- vmovdqa xmm6, OWORD PTR [rdi+128]
- vaesenc xmm5, xmm5, xmm6
- vaesenc xmm1, xmm1, xmm6
- vmovdqa xmm6, OWORD PTR [rdi+144]
- vaesenc xmm5, xmm5, xmm6
- vaesenc xmm1, xmm1, xmm6
- ; Fewer than 11 rounds (AES-128): go straight to the last round.
- cmp esi, 11
- vmovdqa xmm6, OWORD PTR [rdi+160]
- jl L_AES_GCM_init_avx1_calc_iv_12_last
- vaesenc xmm5, xmm5, xmm6
- vaesenc xmm1, xmm1, xmm6
- vmovdqa xmm6, OWORD PTR [rdi+176]
- vaesenc xmm5, xmm5, xmm6
- vaesenc xmm1, xmm1, xmm6
- ; Fewer than 13 rounds (AES-192): last round uses key 12.
- cmp esi, 13
- vmovdqa xmm6, OWORD PTR [rdi+192]
- jl L_AES_GCM_init_avx1_calc_iv_12_last
- vaesenc xmm5, xmm5, xmm6
- vaesenc xmm1, xmm1, xmm6
- vmovdqa xmm6, OWORD PTR [rdi+208]
- vaesenc xmm5, xmm5, xmm6
- vaesenc xmm1, xmm1, xmm6
- vmovdqa xmm6, OWORD PTR [rdi+224]
- L_AES_GCM_init_avx1_calc_iv_12_last:
- vaesenclast xmm5, xmm5, xmm6
- vaesenclast xmm1, xmm1, xmm6
- ; Keep H byte-reflected for PCLMULQDQ-based GHASH.
- vpshufb xmm5, xmm5, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vmovdqu xmm15, xmm1
- jmp L_AES_GCM_init_avx1_iv_done
- L_AES_GCM_init_avx1_iv_not_12:
- ; Calculate values when IV is not 12 bytes
- ; H = Encrypt X(=0)
- vmovdqa xmm5, OWORD PTR [rdi]
- vaesenc xmm5, xmm5, [rdi+16]
- vaesenc xmm5, xmm5, [rdi+32]
- vaesenc xmm5, xmm5, [rdi+48]
- vaesenc xmm5, xmm5, [rdi+64]
- vaesenc xmm5, xmm5, [rdi+80]
- vaesenc xmm5, xmm5, [rdi+96]
- vaesenc xmm5, xmm5, [rdi+112]
- vaesenc xmm5, xmm5, [rdi+128]
- vaesenc xmm5, xmm5, [rdi+144]
- cmp esi, 11
- vmovdqa xmm8, OWORD PTR [rdi+160]
- jl L_AES_GCM_init_avx1_calc_iv_1_aesenc_avx_last
- vaesenc xmm5, xmm5, xmm8
- vaesenc xmm5, xmm5, [rdi+176]
- cmp esi, 13
- vmovdqa xmm8, OWORD PTR [rdi+192]
- jl L_AES_GCM_init_avx1_calc_iv_1_aesenc_avx_last
- vaesenc xmm5, xmm5, xmm8
- vaesenc xmm5, xmm5, [rdi+208]
- vmovdqa xmm8, OWORD PTR [rdi+224]
- L_AES_GCM_init_avx1_calc_iv_1_aesenc_avx_last:
- vaesenclast xmm5, xmm5, xmm8
- vpshufb xmm5, xmm5, OWORD PTR L_avx1_aes_gcm_bswap_mask
- ; Calc counter
- ; Initialization vector
- ; counter0 = GHASH(IV): fold full 16-byte chunks, then the partial tail,
- ; then the bit length.  rcx is the byte offset into the IV.
- cmp edx, 0
- mov rcx, 0
- je L_AES_GCM_init_avx1_calc_iv_done
- cmp edx, 16
- jl L_AES_GCM_init_avx1_calc_iv_lt16
- ; 4294967280 = 0xFFFFFFF0: round the length down to a multiple of 16.
- and edx, 4294967280
- L_AES_GCM_init_avx1_calc_iv_16_loop:
- vmovdqu xmm7, OWORD PTR [r10+rcx]
- vpshufb xmm7, xmm7, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm4, xmm4, xmm7
- ; ghash_gfmul_avx
- ; Karatsuba 128x128 carry-less multiply of xmm4 by H (xmm5), followed
- ; by the GHASH shift and polynomial reduction; result back in xmm4.
- vpshufd xmm1, xmm4, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpxor xmm1, xmm1, xmm4
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vmovdqa xmm6, xmm0
- vmovdqa xmm4, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm6, xmm6, xmm2
- vpxor xmm4, xmm4, xmm1
- vpsrld xmm0, xmm6, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm6, xmm6, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm6, xmm6, xmm0
- vpor xmm4, xmm4, xmm1
- vpslld xmm0, xmm6, 31
- vpslld xmm1, xmm6, 30
- vpslld xmm2, xmm6, 25
- vpxor xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm2
- vmovdqa xmm1, xmm0
- vpsrldq xmm1, xmm1, 4
- vpslldq xmm0, xmm0, 12
- vpxor xmm6, xmm6, xmm0
- vpsrld xmm2, xmm6, 1
- vpsrld xmm3, xmm6, 2
- vpsrld xmm0, xmm6, 7
- vpxor xmm2, xmm2, xmm3
- vpxor xmm2, xmm2, xmm0
- vpxor xmm2, xmm2, xmm1
- vpxor xmm2, xmm2, xmm6
- vpxor xmm4, xmm4, xmm2
- add ecx, 16
- cmp ecx, edx
- jl L_AES_GCM_init_avx1_calc_iv_16_loop
- mov edx, r11d
- cmp ecx, edx
- je L_AES_GCM_init_avx1_calc_iv_done
- L_AES_GCM_init_avx1_calc_iv_lt16:
- ; Copy the remaining (< 16) IV bytes into a zero-padded stack block.
- sub rsp, 16
- vpxor xmm7, xmm7, xmm7
- xor r13d, r13d
- vmovdqu OWORD PTR [rsp], xmm7
- L_AES_GCM_init_avx1_calc_iv_loop:
- movzx r12d, BYTE PTR [r10+rcx]
- mov BYTE PTR [rsp+r13], r12b
- inc ecx
- inc r13d
- cmp ecx, edx
- jl L_AES_GCM_init_avx1_calc_iv_loop
- vmovdqu xmm7, OWORD PTR [rsp]
- add rsp, 16
- vpshufb xmm7, xmm7, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm4, xmm4, xmm7
- ; ghash_gfmul_avx
- vpshufd xmm1, xmm4, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpxor xmm1, xmm1, xmm4
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vmovdqa xmm6, xmm0
- vmovdqa xmm4, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm6, xmm6, xmm2
- vpxor xmm4, xmm4, xmm1
- vpsrld xmm0, xmm6, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm6, xmm6, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm6, xmm6, xmm0
- vpor xmm4, xmm4, xmm1
- vpslld xmm0, xmm6, 31
- vpslld xmm1, xmm6, 30
- vpslld xmm2, xmm6, 25
- vpxor xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm2
- vmovdqa xmm1, xmm0
- vpsrldq xmm1, xmm1, 4
- vpslldq xmm0, xmm0, 12
- vpxor xmm6, xmm6, xmm0
- vpsrld xmm2, xmm6, 1
- vpsrld xmm3, xmm6, 2
- vpsrld xmm0, xmm6, 7
- vpxor xmm2, xmm2, xmm3
- vpxor xmm2, xmm2, xmm0
- vpxor xmm2, xmm2, xmm1
- vpxor xmm2, xmm2, xmm6
- vpxor xmm4, xmm4, xmm2
- L_AES_GCM_init_avx1_calc_iv_done:
- ; T = Encrypt counter
- ; First fold the IV length (in bits) into the hash and do the final
- ; GHASH multiply, then AES-encrypt the resulting counter0 to get T.
- vpxor xmm0, xmm0, xmm0
- shl edx, 3
- vmovq xmm0, rdx
- vpxor xmm4, xmm4, xmm0
- ; ghash_gfmul_avx
- vpshufd xmm1, xmm4, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpxor xmm1, xmm1, xmm4
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vmovdqa xmm6, xmm0
- vmovdqa xmm4, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm6, xmm6, xmm2
- vpxor xmm4, xmm4, xmm1
- vpsrld xmm0, xmm6, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm6, xmm6, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm6, xmm6, xmm0
- vpor xmm4, xmm4, xmm1
- vpslld xmm0, xmm6, 31
- vpslld xmm1, xmm6, 30
- vpslld xmm2, xmm6, 25
- vpxor xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm2
- vmovdqa xmm1, xmm0
- vpsrldq xmm1, xmm1, 4
- vpslldq xmm0, xmm0, 12
- vpxor xmm6, xmm6, xmm0
- vpsrld xmm2, xmm6, 1
- vpsrld xmm3, xmm6, 2
- vpsrld xmm0, xmm6, 7
- vpxor xmm2, xmm2, xmm3
- vpxor xmm2, xmm2, xmm0
- vpxor xmm2, xmm2, xmm1
- vpxor xmm2, xmm2, xmm6
- vpxor xmm4, xmm4, xmm2
- vpshufb xmm4, xmm4, OWORD PTR L_avx1_aes_gcm_bswap_mask
- ; Encrypt counter
- vmovdqa xmm7, OWORD PTR [rdi]
- vpxor xmm7, xmm7, xmm4
- vaesenc xmm7, xmm7, [rdi+16]
- vaesenc xmm7, xmm7, [rdi+32]
- vaesenc xmm7, xmm7, [rdi+48]
- vaesenc xmm7, xmm7, [rdi+64]
- vaesenc xmm7, xmm7, [rdi+80]
- vaesenc xmm7, xmm7, [rdi+96]
- vaesenc xmm7, xmm7, [rdi+112]
- vaesenc xmm7, xmm7, [rdi+128]
- vaesenc xmm7, xmm7, [rdi+144]
- cmp esi, 11
- vmovdqa xmm8, OWORD PTR [rdi+160]
- jl L_AES_GCM_init_avx1_calc_iv_2_aesenc_avx_last
- vaesenc xmm7, xmm7, xmm8
- vaesenc xmm7, xmm7, [rdi+176]
- cmp esi, 13
- vmovdqa xmm8, OWORD PTR [rdi+192]
- jl L_AES_GCM_init_avx1_calc_iv_2_aesenc_avx_last
- vaesenc xmm7, xmm7, xmm8
- vaesenc xmm7, xmm7, [rdi+208]
- vmovdqa xmm8, OWORD PTR [rdi+224]
- L_AES_GCM_init_avx1_calc_iv_2_aesenc_avx_last:
- vaesenclast xmm7, xmm7, xmm8
- vmovdqu xmm15, xmm7
- L_AES_GCM_init_avx1_iv_done:
- ; Write the outputs: T, then counter+1 (byte-reflected and incremented),
- ; then H.
- vmovdqa OWORD PTR [r9], xmm15
- vpshufb xmm4, xmm4, OWORD PTR L_avx1_aes_gcm_bswap_epi64
- vpaddd xmm4, xmm4, OWORD PTR L_avx1_aes_gcm_one
- vmovdqa OWORD PTR [rax], xmm5
- vmovdqa OWORD PTR [r8], xmm4
- vzeroupper
- vmovdqu xmm6, OWORD PTR [rsp+16]
- vmovdqu xmm7, OWORD PTR [rsp+32]
- vmovdqu xmm8, OWORD PTR [rsp+48]
- vmovdqu xmm15, OWORD PTR [rsp+64]
- add rsp, 80
- pop r13
- pop r12
- pop rsi
- pop rdi
- ret
- AES_GCM_init_avx1 ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- ; AES_GCM_aad_update_avx1 -- fold additional authenticated data into the
- ; running GHASH value.
- ; ABI: Windows x64 (MASM).
- ; In:  rcx = AAD pointer
- ;      edx = AAD length in bytes
- ;      r8  = pointer to 16-byte running GHASH state X (read and updated)
- ;      r9  = pointer to 16-byte hash key H (byte-reflected)
- ; NOTE(review): the loop body runs before the length test, so edx is
- ; assumed to be a non-zero multiple of 16 -- confirm with callers.
- ; Saves/restores callee-saved xmm6/xmm7 (Win64 ABI).
- AES_GCM_aad_update_avx1 PROC
- mov rax, rcx
- sub rsp, 32
- vmovdqu OWORD PTR [rsp], xmm6
- vmovdqu OWORD PTR [rsp+16], xmm7
- vmovdqa xmm5, OWORD PTR [r8]
- vmovdqa xmm6, OWORD PTR [r9]
- xor ecx, ecx
- L_AES_GCM_aad_update_avx1_16_loop:
- ; X = (X ^ bswap(block)) * H in GF(2^128) for each 16-byte block.
- vmovdqu xmm7, OWORD PTR [rax+rcx]
- vpshufb xmm7, xmm7, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm7
- ; ghash_gfmul_avx
- ; Karatsuba carry-less multiply followed by the GHASH shift and
- ; polynomial reduction; product lands back in xmm5.
- vpshufd xmm1, xmm5, 78
- vpshufd xmm2, xmm6, 78
- vpclmulqdq xmm3, xmm6, xmm5, 17
- vpclmulqdq xmm0, xmm6, xmm5, 0
- vpxor xmm1, xmm1, xmm5
- vpxor xmm2, xmm2, xmm6
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vmovdqa xmm4, xmm0
- vmovdqa xmm5, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm4, xmm4, xmm2
- vpxor xmm5, xmm5, xmm1
- vpsrld xmm0, xmm4, 31
- vpsrld xmm1, xmm5, 31
- vpslld xmm4, xmm4, 1
- vpslld xmm5, xmm5, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm5, xmm5, xmm2
- vpor xmm4, xmm4, xmm0
- vpor xmm5, xmm5, xmm1
- vpslld xmm0, xmm4, 31
- vpslld xmm1, xmm4, 30
- vpslld xmm2, xmm4, 25
- vpxor xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm2
- vmovdqa xmm1, xmm0
- vpsrldq xmm1, xmm1, 4
- vpslldq xmm0, xmm0, 12
- vpxor xmm4, xmm4, xmm0
- vpsrld xmm2, xmm4, 1
- vpsrld xmm3, xmm4, 2
- vpsrld xmm0, xmm4, 7
- vpxor xmm2, xmm2, xmm3
- vpxor xmm2, xmm2, xmm0
- vpxor xmm2, xmm2, xmm1
- vpxor xmm2, xmm2, xmm4
- vpxor xmm5, xmm5, xmm2
- add ecx, 16
- cmp ecx, edx
- jl L_AES_GCM_aad_update_avx1_16_loop
- ; Store the updated GHASH state back to the caller.
- vmovdqa OWORD PTR [r8], xmm5
- vzeroupper
- vmovdqu xmm6, OWORD PTR [rsp]
- vmovdqu xmm7, OWORD PTR [rsp+16]
- add rsp, 32
- ret
- AES_GCM_aad_update_avx1 ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- ; AES_GCM_encrypt_block_avx1 -- CTR-encrypt a single 16-byte block and
- ; advance the stored counter.
- ; ABI: Windows x64 (MASM).
- ; In:  rcx = AES round-key schedule
- ;      edx = AES round count (compared against 11 / 13 for 10/12/14 rounds)
- ;      r8  = output block pointer
- ;      r9  = input block pointer
- ;      [rsp+40] = pointer to the 16-byte counter block (incremented here)
- ; Out: ciphertext stored at [r8]; xmm0 also holds the byte-reflected
- ;      ciphertext on return (volatile reg -- presumably for a GHASH
- ;      follow-up in the caller; NOTE(review): confirm).
- AES_GCM_encrypt_block_avx1 PROC
- mov r10, r8
- mov r11, r9
- mov rax, QWORD PTR [rsp+40]
- ; Byte-reflect the counter for encryption, then store counter+1 back.
- vmovdqu xmm1, OWORD PTR [rax]
- vpshufb xmm0, xmm1, OWORD PTR L_avx1_aes_gcm_bswap_epi64
- vpaddd xmm1, xmm1, OWORD PTR L_avx1_aes_gcm_one
- vmovdqu OWORD PTR [rax], xmm1
- vpxor xmm0, xmm0, [rcx]
- vaesenc xmm0, xmm0, [rcx+16]
- vaesenc xmm0, xmm0, [rcx+32]
- vaesenc xmm0, xmm0, [rcx+48]
- vaesenc xmm0, xmm0, [rcx+64]
- vaesenc xmm0, xmm0, [rcx+80]
- vaesenc xmm0, xmm0, [rcx+96]
- vaesenc xmm0, xmm0, [rcx+112]
- vaesenc xmm0, xmm0, [rcx+128]
- vaesenc xmm0, xmm0, [rcx+144]
- ; Extra rounds only for AES-192 (12) and AES-256 (14).
- cmp edx, 11
- vmovdqa xmm1, OWORD PTR [rcx+160]
- jl L_AES_GCM_encrypt_block_avx1_aesenc_block_last
- vaesenc xmm0, xmm0, xmm1
- vaesenc xmm0, xmm0, [rcx+176]
- cmp edx, 13
- vmovdqa xmm1, OWORD PTR [rcx+192]
- jl L_AES_GCM_encrypt_block_avx1_aesenc_block_last
- vaesenc xmm0, xmm0, xmm1
- vaesenc xmm0, xmm0, [rcx+208]
- vmovdqa xmm1, OWORD PTR [rcx+224]
- L_AES_GCM_encrypt_block_avx1_aesenc_block_last:
- vaesenclast xmm0, xmm0, xmm1
- ; ciphertext = keystream XOR plaintext
- vmovdqu xmm1, OWORD PTR [r11]
- vpxor xmm0, xmm0, xmm1
- vmovdqu OWORD PTR [r10], xmm0
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vzeroupper
- ret
- AES_GCM_encrypt_block_avx1 ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- ; AES_GCM_ghash_block_avx1 -- fold one 16-byte block into a GHASH state:
- ; X = (X ^ bswap(block)) * H in GF(2^128) with the GCM reduction.
- ; ABI: Windows x64 (MASM).
- ; In:  rcx = pointer to the 16-byte input block
- ;      rdx = pointer to the 16-byte GHASH state X (read and updated)
- ;      r8  = pointer to the 16-byte hash key H (byte-reflected)
- ; Saves/restores callee-saved xmm6/xmm7 (Win64 ABI).
- AES_GCM_ghash_block_avx1 PROC
- sub rsp, 32
- vmovdqu OWORD PTR [rsp], xmm6
- vmovdqu OWORD PTR [rsp+16], xmm7
- vmovdqa xmm4, OWORD PTR [rdx]
- vmovdqa xmm5, OWORD PTR [r8]
- vmovdqu xmm7, OWORD PTR [rcx]
- vpshufb xmm7, xmm7, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm4, xmm4, xmm7
- ; ghash_gfmul_avx
- ; Karatsuba carry-less multiply of X by H, then the GHASH shift and
- ; polynomial reduction; result lands back in xmm4.
- vpshufd xmm1, xmm4, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpxor xmm1, xmm1, xmm4
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vmovdqa xmm6, xmm0
- vmovdqa xmm4, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm6, xmm6, xmm2
- vpxor xmm4, xmm4, xmm1
- vpsrld xmm0, xmm6, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm6, xmm6, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm6, xmm6, xmm0
- vpor xmm4, xmm4, xmm1
- vpslld xmm0, xmm6, 31
- vpslld xmm1, xmm6, 30
- vpslld xmm2, xmm6, 25
- vpxor xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm2
- vmovdqa xmm1, xmm0
- vpsrldq xmm1, xmm1, 4
- vpslldq xmm0, xmm0, 12
- vpxor xmm6, xmm6, xmm0
- vpsrld xmm2, xmm6, 1
- vpsrld xmm3, xmm6, 2
- vpsrld xmm0, xmm6, 7
- vpxor xmm2, xmm2, xmm3
- vpxor xmm2, xmm2, xmm0
- vpxor xmm2, xmm2, xmm1
- vpxor xmm2, xmm2, xmm6
- vpxor xmm4, xmm4, xmm2
- ; Store the updated GHASH state back to the caller.
- vmovdqa OWORD PTR [rdx], xmm4
- vzeroupper
- vmovdqu xmm6, OWORD PTR [rsp]
- vmovdqu xmm7, OWORD PTR [rsp+16]
- add rsp, 32
- ret
- AES_GCM_ghash_block_avx1 ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_encrypt_update_avx1 PROC
- push r13
- push r12
- push r14
- push r15
- push rdi
- mov rax, rcx
- mov r10, r8
- mov r8d, edx
- mov r11, r9
- mov r9d, DWORD PTR [rsp+80]
- mov r12, QWORD PTR [rsp+88]
- mov r14, QWORD PTR [rsp+96]
- mov r15, QWORD PTR [rsp+104]
- sub rsp, 320
- vmovdqu OWORD PTR [rsp+160], xmm6
- vmovdqu OWORD PTR [rsp+176], xmm7
- vmovdqu OWORD PTR [rsp+192], xmm8
- vmovdqu OWORD PTR [rsp+208], xmm9
- vmovdqu OWORD PTR [rsp+224], xmm10
- vmovdqu OWORD PTR [rsp+240], xmm11
- vmovdqu OWORD PTR [rsp+256], xmm12
- vmovdqu OWORD PTR [rsp+272], xmm13
- vmovdqu OWORD PTR [rsp+288], xmm14
- vmovdqu OWORD PTR [rsp+304], xmm15
- vmovdqa xmm6, OWORD PTR [r12]
- vmovdqa xmm5, OWORD PTR [r14]
- vpsrlq xmm9, xmm5, 63
- vpsllq xmm8, xmm5, 1
- vpslldq xmm9, xmm9, 8
- vpor xmm8, xmm8, xmm9
- vpshufd xmm5, xmm5, 255
- vpsrad xmm5, xmm5, 31
- vpand xmm5, xmm5, OWORD PTR L_avx1_aes_gcm_mod2_128
- vpxor xmm5, xmm5, xmm8
- xor edi, edi
- cmp r9d, 128
- mov r13d, r9d
- jl L_AES_GCM_encrypt_update_avx1_done_128
- and r13d, 4294967168
- vmovdqa xmm2, xmm6
- ; H ^ 1
- vmovdqu OWORD PTR [rsp], xmm5
- ; H ^ 2
- vpclmulqdq xmm8, xmm5, xmm5, 0
- vpclmulqdq xmm0, xmm5, xmm5, 17
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm0, xmm0, xmm14
- vmovdqu OWORD PTR [rsp+16], xmm0
- ; H ^ 3
- ; ghash_gfmul_red_avx
- vpshufd xmm9, xmm5, 78
- vpshufd xmm10, xmm0, 78
- vpclmulqdq xmm11, xmm0, xmm5, 17
- vpclmulqdq xmm8, xmm0, xmm5, 0
- vpxor xmm9, xmm9, xmm5
- vpxor xmm10, xmm10, xmm0
- vpclmulqdq xmm9, xmm9, xmm10, 0
- vpxor xmm9, xmm9, xmm8
- vpxor xmm9, xmm9, xmm11
- vpslldq xmm10, xmm9, 8
- vpsrldq xmm9, xmm9, 8
- vpxor xmm8, xmm8, xmm10
- vpxor xmm1, xmm11, xmm9
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm1, xmm1, xmm14
- vmovdqu OWORD PTR [rsp+32], xmm1
- ; H ^ 4
- vpclmulqdq xmm8, xmm0, xmm0, 0
- vpclmulqdq xmm3, xmm0, xmm0, 17
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm3, xmm3, xmm14
- vmovdqu OWORD PTR [rsp+48], xmm3
- ; H ^ 5
- ; ghash_gfmul_red_avx
- vpshufd xmm9, xmm0, 78
- vpshufd xmm10, xmm1, 78
- vpclmulqdq xmm11, xmm1, xmm0, 17
- vpclmulqdq xmm8, xmm1, xmm0, 0
- vpxor xmm9, xmm9, xmm0
- vpxor xmm10, xmm10, xmm1
- vpclmulqdq xmm9, xmm9, xmm10, 0
- vpxor xmm9, xmm9, xmm8
- vpxor xmm9, xmm9, xmm11
- vpslldq xmm10, xmm9, 8
- vpsrldq xmm9, xmm9, 8
- vpxor xmm8, xmm8, xmm10
- vpxor xmm7, xmm11, xmm9
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm7, xmm7, xmm14
- vmovdqu OWORD PTR [rsp+64], xmm7
- ; H ^ 6
- vpclmulqdq xmm8, xmm1, xmm1, 0
- vpclmulqdq xmm7, xmm1, xmm1, 17
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm7, xmm7, xmm14
- vmovdqu OWORD PTR [rsp+80], xmm7
- ; H ^ 7
- ; ghash_gfmul_red_avx
- vpshufd xmm9, xmm1, 78
- vpshufd xmm10, xmm3, 78
- vpclmulqdq xmm11, xmm3, xmm1, 17
- vpclmulqdq xmm8, xmm3, xmm1, 0
- vpxor xmm9, xmm9, xmm1
- vpxor xmm10, xmm10, xmm3
- vpclmulqdq xmm9, xmm9, xmm10, 0
- vpxor xmm9, xmm9, xmm8
- vpxor xmm9, xmm9, xmm11
- vpslldq xmm10, xmm9, 8
- vpsrldq xmm9, xmm9, 8
- vpxor xmm8, xmm8, xmm10
- vpxor xmm7, xmm11, xmm9
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm7, xmm7, xmm14
- vmovdqu OWORD PTR [rsp+96], xmm7
- ; H ^ 8
- vpclmulqdq xmm8, xmm3, xmm3, 0
- vpclmulqdq xmm7, xmm3, xmm3, 17
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm7, xmm7, xmm14
- vmovdqu OWORD PTR [rsp+112], xmm7
- ; First 128 bytes of input
- vmovdqu xmm0, OWORD PTR [r15]
- vmovdqa xmm1, OWORD PTR L_avx1_aes_gcm_bswap_epi64
- vpshufb xmm8, xmm0, xmm1
- vpaddd xmm9, xmm0, OWORD PTR L_avx1_aes_gcm_one
- vpshufb xmm9, xmm9, xmm1
- vpaddd xmm10, xmm0, OWORD PTR L_avx1_aes_gcm_two
- vpshufb xmm10, xmm10, xmm1
- vpaddd xmm11, xmm0, OWORD PTR L_avx1_aes_gcm_three
- vpshufb xmm11, xmm11, xmm1
- vpaddd xmm12, xmm0, OWORD PTR L_avx1_aes_gcm_four
- vpshufb xmm12, xmm12, xmm1
- vpaddd xmm13, xmm0, OWORD PTR L_avx1_aes_gcm_five
- vpshufb xmm13, xmm13, xmm1
- vpaddd xmm14, xmm0, OWORD PTR L_avx1_aes_gcm_six
- vpshufb xmm14, xmm14, xmm1
- vpaddd xmm15, xmm0, OWORD PTR L_avx1_aes_gcm_seven
- vpshufb xmm15, xmm15, xmm1
- vpaddd xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_eight
- vmovdqa xmm7, OWORD PTR [rax]
- vmovdqu OWORD PTR [r15], xmm0
- vpxor xmm8, xmm8, xmm7
- vpxor xmm9, xmm9, xmm7
- vpxor xmm10, xmm10, xmm7
- vpxor xmm11, xmm11, xmm7
- vpxor xmm12, xmm12, xmm7
- vpxor xmm13, xmm13, xmm7
- vpxor xmm14, xmm14, xmm7
- vpxor xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [rax+16]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [rax+32]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [rax+48]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [rax+64]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [rax+80]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [rax+96]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [rax+112]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [rax+128]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [rax+144]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- cmp r8d, 11
- vmovdqa xmm7, OWORD PTR [rax+160]
- jl L_AES_GCM_encrypt_update_avx1_aesenc_128_enc_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [rax+176]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- cmp r8d, 13
- vmovdqa xmm7, OWORD PTR [rax+192]
- jl L_AES_GCM_encrypt_update_avx1_aesenc_128_enc_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [rax+208]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [rax+224]
- L_AES_GCM_encrypt_update_avx1_aesenc_128_enc_done:
- vaesenclast xmm8, xmm8, xmm7
- vaesenclast xmm9, xmm9, xmm7
- vmovdqu xmm0, OWORD PTR [r11]
- vmovdqu xmm1, OWORD PTR [r11+16]
- vpxor xmm8, xmm8, xmm0
- vpxor xmm9, xmm9, xmm1
- vmovdqu OWORD PTR [r10], xmm8
- vmovdqu OWORD PTR [r10+16], xmm9
- vaesenclast xmm10, xmm10, xmm7
- vaesenclast xmm11, xmm11, xmm7
- vmovdqu xmm0, OWORD PTR [r11+32]
- vmovdqu xmm1, OWORD PTR [r11+48]
- vpxor xmm10, xmm10, xmm0
- vpxor xmm11, xmm11, xmm1
- vmovdqu OWORD PTR [r10+32], xmm10
- vmovdqu OWORD PTR [r10+48], xmm11
- vaesenclast xmm12, xmm12, xmm7
- vaesenclast xmm13, xmm13, xmm7
- vmovdqu xmm0, OWORD PTR [r11+64]
- vmovdqu xmm1, OWORD PTR [r11+80]
- vpxor xmm12, xmm12, xmm0
- vpxor xmm13, xmm13, xmm1
- vmovdqu OWORD PTR [r10+64], xmm12
- vmovdqu OWORD PTR [r10+80], xmm13
- vaesenclast xmm14, xmm14, xmm7
- vaesenclast xmm15, xmm15, xmm7
- vmovdqu xmm0, OWORD PTR [r11+96]
- vmovdqu xmm1, OWORD PTR [r11+112]
- vpxor xmm14, xmm14, xmm0
- vpxor xmm15, xmm15, xmm1
- vmovdqu OWORD PTR [r10+96], xmm14
- vmovdqu OWORD PTR [r10+112], xmm15
- cmp r13d, 128
- mov edi, 128
- jle L_AES_GCM_encrypt_update_avx1_end_128
- ; More 128 bytes of input
- L_AES_GCM_encrypt_update_avx1_ghash_128:
- lea rcx, QWORD PTR [r11+rdi]
- lea rdx, QWORD PTR [r10+rdi]
- vmovdqu xmm0, OWORD PTR [r15]
- vmovdqa xmm1, OWORD PTR L_avx1_aes_gcm_bswap_epi64
- vpshufb xmm8, xmm0, xmm1
- vpaddd xmm9, xmm0, OWORD PTR L_avx1_aes_gcm_one
- vpshufb xmm9, xmm9, xmm1
- vpaddd xmm10, xmm0, OWORD PTR L_avx1_aes_gcm_two
- vpshufb xmm10, xmm10, xmm1
- vpaddd xmm11, xmm0, OWORD PTR L_avx1_aes_gcm_three
- vpshufb xmm11, xmm11, xmm1
- vpaddd xmm12, xmm0, OWORD PTR L_avx1_aes_gcm_four
- vpshufb xmm12, xmm12, xmm1
- vpaddd xmm13, xmm0, OWORD PTR L_avx1_aes_gcm_five
- vpshufb xmm13, xmm13, xmm1
- vpaddd xmm14, xmm0, OWORD PTR L_avx1_aes_gcm_six
- vpshufb xmm14, xmm14, xmm1
- vpaddd xmm15, xmm0, OWORD PTR L_avx1_aes_gcm_seven
- vpshufb xmm15, xmm15, xmm1
- vpaddd xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_eight
- vmovdqa xmm7, OWORD PTR [rax]
- vmovdqu OWORD PTR [r15], xmm0
- vpxor xmm8, xmm8, xmm7
- vpxor xmm9, xmm9, xmm7
- vpxor xmm10, xmm10, xmm7
- vpxor xmm11, xmm11, xmm7
- vpxor xmm12, xmm12, xmm7
- vpxor xmm13, xmm13, xmm7
- vpxor xmm14, xmm14, xmm7
- vpxor xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsp+112]
- vmovdqu xmm0, OWORD PTR [rdx+-128]
- vaesenc xmm8, xmm8, [rax+16]
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm0, xmm0, xmm2
- vpshufd xmm1, xmm7, 78
- vpshufd xmm5, xmm0, 78
- vpxor xmm1, xmm1, xmm7
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm3, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [rax+16]
- vaesenc xmm10, xmm10, [rax+16]
- vpclmulqdq xmm2, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [rax+16]
- vaesenc xmm12, xmm12, [rax+16]
- vpclmulqdq xmm1, xmm1, xmm5, 0
- vaesenc xmm13, xmm13, [rax+16]
- vaesenc xmm14, xmm14, [rax+16]
- vaesenc xmm15, xmm15, [rax+16]
- vpxor xmm1, xmm1, xmm2
- vpxor xmm1, xmm1, xmm3
- vmovdqu xmm7, OWORD PTR [rsp+96]
- vmovdqu xmm0, OWORD PTR [rdx+-112]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [rax+32]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [rax+32]
- vaesenc xmm10, xmm10, [rax+32]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [rax+32]
- vaesenc xmm12, xmm12, [rax+32]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [rax+32]
- vaesenc xmm14, xmm14, [rax+32]
- vaesenc xmm15, xmm15, [rax+32]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vmovdqu xmm7, OWORD PTR [rsp+80]
- vmovdqu xmm0, OWORD PTR [rdx+-96]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [rax+48]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [rax+48]
- vaesenc xmm10, xmm10, [rax+48]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [rax+48]
- vaesenc xmm12, xmm12, [rax+48]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [rax+48]
- vaesenc xmm14, xmm14, [rax+48]
- vaesenc xmm15, xmm15, [rax+48]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vmovdqu xmm7, OWORD PTR [rsp+64]
- vmovdqu xmm0, OWORD PTR [rdx+-80]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [rax+64]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [rax+64]
- vaesenc xmm10, xmm10, [rax+64]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [rax+64]
- vaesenc xmm12, xmm12, [rax+64]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [rax+64]
- vaesenc xmm14, xmm14, [rax+64]
- vaesenc xmm15, xmm15, [rax+64]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vmovdqu xmm7, OWORD PTR [rsp+48]
- vmovdqu xmm0, OWORD PTR [rdx+-64]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [rax+80]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [rax+80]
- vaesenc xmm10, xmm10, [rax+80]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [rax+80]
- vaesenc xmm12, xmm12, [rax+80]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [rax+80]
- vaesenc xmm14, xmm14, [rax+80]
- vaesenc xmm15, xmm15, [rax+80]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vmovdqu xmm7, OWORD PTR [rsp+32]
- vmovdqu xmm0, OWORD PTR [rdx+-48]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [rax+96]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [rax+96]
- vaesenc xmm10, xmm10, [rax+96]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [rax+96]
- vaesenc xmm12, xmm12, [rax+96]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [rax+96]
- vaesenc xmm14, xmm14, [rax+96]
- vaesenc xmm15, xmm15, [rax+96]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vmovdqu xmm7, OWORD PTR [rsp+16]
- vmovdqu xmm0, OWORD PTR [rdx+-32]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [rax+112]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [rax+112]
- vaesenc xmm10, xmm10, [rax+112]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [rax+112]
- vaesenc xmm12, xmm12, [rax+112]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [rax+112]
- vaesenc xmm14, xmm14, [rax+112]
- vaesenc xmm15, xmm15, [rax+112]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vmovdqu xmm7, OWORD PTR [rsp]
- vmovdqu xmm0, OWORD PTR [rdx+-16]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [rax+128]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [rax+128]
- vaesenc xmm10, xmm10, [rax+128]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [rax+128]
- vaesenc xmm12, xmm12, [rax+128]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [rax+128]
- vaesenc xmm14, xmm14, [rax+128]
- vaesenc xmm15, xmm15, [rax+128]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- vpslldq xmm5, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vaesenc xmm8, xmm8, [rax+144]
- vpxor xmm2, xmm2, xmm5
- vpxor xmm3, xmm3, xmm1
- vaesenc xmm9, xmm9, [rax+144]
- vpslld xmm7, xmm2, 31
- vpslld xmm4, xmm2, 30
- vpslld xmm5, xmm2, 25
- vaesenc xmm10, xmm10, [rax+144]
- vpxor xmm7, xmm7, xmm4
- vpxor xmm7, xmm7, xmm5
- vaesenc xmm11, xmm11, [rax+144]
- vpsrldq xmm4, xmm7, 4
- vpslldq xmm7, xmm7, 12
- vaesenc xmm12, xmm12, [rax+144]
- vpxor xmm2, xmm2, xmm7
- vpsrld xmm5, xmm2, 1
- vaesenc xmm13, xmm13, [rax+144]
- vpsrld xmm1, xmm2, 2
- vpsrld xmm0, xmm2, 7
- vaesenc xmm14, xmm14, [rax+144]
- vpxor xmm5, xmm5, xmm1
- vpxor xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, [rax+144]
- vpxor xmm5, xmm5, xmm4
- vpxor xmm2, xmm2, xmm5
- vpxor xmm2, xmm2, xmm3
- cmp r8d, 11
- vmovdqa xmm7, OWORD PTR [rax+160]
- jl L_AES_GCM_encrypt_update_avx1_aesenc_128_ghash_avx_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [rax+176]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- cmp r8d, 13
- vmovdqa xmm7, OWORD PTR [rax+192]
- jl L_AES_GCM_encrypt_update_avx1_aesenc_128_ghash_avx_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [rax+208]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [rax+224]
- L_AES_GCM_encrypt_update_avx1_aesenc_128_ghash_avx_done:
- vaesenclast xmm8, xmm8, xmm7
- vaesenclast xmm9, xmm9, xmm7
- vmovdqu xmm0, OWORD PTR [rcx]
- vmovdqu xmm1, OWORD PTR [rcx+16]
- vpxor xmm8, xmm8, xmm0
- vpxor xmm9, xmm9, xmm1
- vmovdqu OWORD PTR [rdx], xmm8
- vmovdqu OWORD PTR [rdx+16], xmm9
- vaesenclast xmm10, xmm10, xmm7
- vaesenclast xmm11, xmm11, xmm7
- vmovdqu xmm0, OWORD PTR [rcx+32]
- vmovdqu xmm1, OWORD PTR [rcx+48]
- vpxor xmm10, xmm10, xmm0
- vpxor xmm11, xmm11, xmm1
- vmovdqu OWORD PTR [rdx+32], xmm10
- vmovdqu OWORD PTR [rdx+48], xmm11
- vaesenclast xmm12, xmm12, xmm7
- vaesenclast xmm13, xmm13, xmm7
- vmovdqu xmm0, OWORD PTR [rcx+64]
- vmovdqu xmm1, OWORD PTR [rcx+80]
- vpxor xmm12, xmm12, xmm0
- vpxor xmm13, xmm13, xmm1
- vmovdqu OWORD PTR [rdx+64], xmm12
- vmovdqu OWORD PTR [rdx+80], xmm13
- vaesenclast xmm14, xmm14, xmm7
- vaesenclast xmm15, xmm15, xmm7
- vmovdqu xmm0, OWORD PTR [rcx+96]
- vmovdqu xmm1, OWORD PTR [rcx+112]
- vpxor xmm14, xmm14, xmm0
- vpxor xmm15, xmm15, xmm1
- vmovdqu OWORD PTR [rdx+96], xmm14
- vmovdqu OWORD PTR [rdx+112], xmm15
- add edi, 128
- cmp edi, r13d
- jl L_AES_GCM_encrypt_update_avx1_ghash_128
- L_AES_GCM_encrypt_update_avx1_end_128:
- vmovdqa xmm4, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpshufb xmm8, xmm8, xmm4
- vpshufb xmm9, xmm9, xmm4
- vpshufb xmm10, xmm10, xmm4
- vpshufb xmm11, xmm11, xmm4
- vpxor xmm8, xmm8, xmm2
- vpshufb xmm12, xmm12, xmm4
- vpshufb xmm13, xmm13, xmm4
- vpshufb xmm14, xmm14, xmm4
- vpshufb xmm15, xmm15, xmm4
- vmovdqu xmm7, OWORD PTR [rsp]
- vmovdqu xmm5, OWORD PTR [rsp+16]
- ; ghash_gfmul_avx
- vpshufd xmm1, xmm15, 78
- vpshufd xmm2, xmm7, 78
- vpclmulqdq xmm3, xmm7, xmm15, 17
- vpclmulqdq xmm0, xmm7, xmm15, 0
- vpxor xmm1, xmm1, xmm15
- vpxor xmm2, xmm2, xmm7
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vmovdqa xmm4, xmm0
- vmovdqa xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm4, xmm4, xmm2
- vpxor xmm6, xmm6, xmm1
- ; ghash_gfmul_xor_avx
- vpshufd xmm1, xmm14, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm14, 17
- vpclmulqdq xmm0, xmm5, xmm14, 0
- vpxor xmm1, xmm1, xmm14
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vpxor xmm4, xmm4, xmm0
- vpxor xmm6, xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm4, xmm4, xmm2
- vpxor xmm6, xmm6, xmm1
- vmovdqu xmm7, OWORD PTR [rsp+32]
- vmovdqu xmm5, OWORD PTR [rsp+48]
- ; ghash_gfmul_xor_avx
- vpshufd xmm1, xmm13, 78
- vpshufd xmm2, xmm7, 78
- vpclmulqdq xmm3, xmm7, xmm13, 17
- vpclmulqdq xmm0, xmm7, xmm13, 0
- vpxor xmm1, xmm1, xmm13
- vpxor xmm2, xmm2, xmm7
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vpxor xmm4, xmm4, xmm0
- vpxor xmm6, xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm4, xmm4, xmm2
- vpxor xmm6, xmm6, xmm1
- ; ghash_gfmul_xor_avx
- vpshufd xmm1, xmm12, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm12, 17
- vpclmulqdq xmm0, xmm5, xmm12, 0
- vpxor xmm1, xmm1, xmm12
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vpxor xmm4, xmm4, xmm0
- vpxor xmm6, xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm4, xmm4, xmm2
- vpxor xmm6, xmm6, xmm1
- vmovdqu xmm7, OWORD PTR [rsp+64]
- vmovdqu xmm5, OWORD PTR [rsp+80]
- ; ghash_gfmul_xor_avx
- vpshufd xmm1, xmm11, 78
- vpshufd xmm2, xmm7, 78
- vpclmulqdq xmm3, xmm7, xmm11, 17
- vpclmulqdq xmm0, xmm7, xmm11, 0
- vpxor xmm1, xmm1, xmm11
- vpxor xmm2, xmm2, xmm7
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vpxor xmm4, xmm4, xmm0
- vpxor xmm6, xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm4, xmm4, xmm2
- vpxor xmm6, xmm6, xmm1
- ; ghash_gfmul_xor_avx
- vpshufd xmm1, xmm10, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm10, 17
- vpclmulqdq xmm0, xmm5, xmm10, 0
- vpxor xmm1, xmm1, xmm10
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vpxor xmm4, xmm4, xmm0
- vpxor xmm6, xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm4, xmm4, xmm2
- vpxor xmm6, xmm6, xmm1
- vmovdqu xmm7, OWORD PTR [rsp+96]
- vmovdqu xmm5, OWORD PTR [rsp+112]
- ; ghash_gfmul_xor_avx
- vpshufd xmm1, xmm9, 78
- vpshufd xmm2, xmm7, 78
- vpclmulqdq xmm3, xmm7, xmm9, 17
- vpclmulqdq xmm0, xmm7, xmm9, 0
- vpxor xmm1, xmm1, xmm9
- vpxor xmm2, xmm2, xmm7
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vpxor xmm4, xmm4, xmm0
- vpxor xmm6, xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm4, xmm4, xmm2
- vpxor xmm6, xmm6, xmm1
- ; ghash_gfmul_xor_avx
- vpshufd xmm1, xmm8, 78
- vpshufd xmm2, xmm5, 78
- vpclmulqdq xmm3, xmm5, xmm8, 17
- vpclmulqdq xmm0, xmm5, xmm8, 0
- vpxor xmm1, xmm1, xmm8
- vpxor xmm2, xmm2, xmm5
- vpclmulqdq xmm1, xmm1, xmm2, 0
- vpxor xmm1, xmm1, xmm0
- vpxor xmm1, xmm1, xmm3
- vpxor xmm4, xmm4, xmm0
- vpxor xmm6, xmm6, xmm3
- vpslldq xmm2, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vpxor xmm4, xmm4, xmm2
- vpxor xmm6, xmm6, xmm1
- vpslld xmm0, xmm4, 31
- vpslld xmm1, xmm4, 30
- vpslld xmm2, xmm4, 25
- vpxor xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm2
- vmovdqa xmm1, xmm0
- vpsrldq xmm1, xmm1, 4
- vpslldq xmm0, xmm0, 12
- vpxor xmm4, xmm4, xmm0
- vpsrld xmm2, xmm4, 1
- vpsrld xmm3, xmm4, 2
- vpsrld xmm0, xmm4, 7
- vpxor xmm2, xmm2, xmm3
- vpxor xmm2, xmm2, xmm0
- vpxor xmm2, xmm2, xmm1
- vpxor xmm2, xmm2, xmm4
- vpxor xmm6, xmm6, xmm2
- vmovdqu xmm5, OWORD PTR [rsp]
- L_AES_GCM_encrypt_update_avx1_done_128:
- mov edx, r9d
- cmp edi, edx
- jge L_AES_GCM_encrypt_update_avx1_done_enc
- mov r13d, r9d
- and r13d, 4294967280
- cmp edi, r13d
- jge L_AES_GCM_encrypt_update_avx1_last_block_done
- vmovdqu xmm9, OWORD PTR [r15]
- vpshufb xmm8, xmm9, OWORD PTR L_avx1_aes_gcm_bswap_epi64
- vpaddd xmm9, xmm9, OWORD PTR L_avx1_aes_gcm_one
- vmovdqu OWORD PTR [r15], xmm9
- vpxor xmm8, xmm8, [rax]
- vaesenc xmm8, xmm8, [rax+16]
- vaesenc xmm8, xmm8, [rax+32]
- vaesenc xmm8, xmm8, [rax+48]
- vaesenc xmm8, xmm8, [rax+64]
- vaesenc xmm8, xmm8, [rax+80]
- vaesenc xmm8, xmm8, [rax+96]
- vaesenc xmm8, xmm8, [rax+112]
- vaesenc xmm8, xmm8, [rax+128]
- vaesenc xmm8, xmm8, [rax+144]
- cmp r8d, 11
- vmovdqa xmm9, OWORD PTR [rax+160]
- jl L_AES_GCM_encrypt_update_avx1_aesenc_block_last
- vaesenc xmm8, xmm8, xmm9
- vaesenc xmm8, xmm8, [rax+176]
- cmp r8d, 13
- vmovdqa xmm9, OWORD PTR [rax+192]
- jl L_AES_GCM_encrypt_update_avx1_aesenc_block_last
- vaesenc xmm8, xmm8, xmm9
- vaesenc xmm8, xmm8, [rax+208]
- vmovdqa xmm9, OWORD PTR [rax+224]
- L_AES_GCM_encrypt_update_avx1_aesenc_block_last:
- vaesenclast xmm8, xmm8, xmm9
- vmovdqu xmm9, OWORD PTR [r11+rdi]
- vpxor xmm8, xmm8, xmm9
- vmovdqu OWORD PTR [r10+rdi], xmm8
- vpshufb xmm8, xmm8, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm6, xmm6, xmm8
- add edi, 16
- cmp edi, r13d
- jge L_AES_GCM_encrypt_update_avx1_last_block_ghash
- L_AES_GCM_encrypt_update_avx1_last_block_start:
- vmovdqu xmm13, OWORD PTR [r11+rdi]
- vmovdqu xmm9, OWORD PTR [r15]
- vpshufb xmm8, xmm9, OWORD PTR L_avx1_aes_gcm_bswap_epi64
- vpaddd xmm9, xmm9, OWORD PTR L_avx1_aes_gcm_one
- vmovdqu OWORD PTR [r15], xmm9
- vpxor xmm8, xmm8, [rax]
- vpclmulqdq xmm10, xmm6, xmm5, 16
- vaesenc xmm8, xmm8, [rax+16]
- vaesenc xmm8, xmm8, [rax+32]
- vpclmulqdq xmm11, xmm6, xmm5, 1
- vaesenc xmm8, xmm8, [rax+48]
- vaesenc xmm8, xmm8, [rax+64]
- vpclmulqdq xmm12, xmm6, xmm5, 0
- vaesenc xmm8, xmm8, [rax+80]
- vpclmulqdq xmm1, xmm6, xmm5, 17
- vaesenc xmm8, xmm8, [rax+96]
- vpxor xmm10, xmm10, xmm11
- vpslldq xmm2, xmm10, 8
- vpsrldq xmm10, xmm10, 8
- vaesenc xmm8, xmm8, [rax+112]
- vpxor xmm2, xmm2, xmm12
- vpxor xmm3, xmm1, xmm10
- vmovdqa xmm0, OWORD PTR L_avx1_aes_gcm_mod2_128
- vpclmulqdq xmm11, xmm2, xmm0, 16
- vaesenc xmm8, xmm8, [rax+128]
- vpshufd xmm10, xmm2, 78
- vpxor xmm10, xmm10, xmm11
- vpclmulqdq xmm11, xmm10, xmm0, 16
- vaesenc xmm8, xmm8, [rax+144]
- vpshufd xmm10, xmm10, 78
- vpxor xmm10, xmm10, xmm11
- vpxor xmm6, xmm10, xmm3
- cmp r8d, 11
- vmovdqa xmm9, OWORD PTR [rax+160]
- jl L_AES_GCM_encrypt_update_avx1_aesenc_gfmul_last
- vaesenc xmm8, xmm8, xmm9
- vaesenc xmm8, xmm8, [rax+176]
- cmp r8d, 13
- vmovdqa xmm9, OWORD PTR [rax+192]
- jl L_AES_GCM_encrypt_update_avx1_aesenc_gfmul_last
- vaesenc xmm8, xmm8, xmm9
- vaesenc xmm8, xmm8, [rax+208]
- vmovdqa xmm9, OWORD PTR [rax+224]
- L_AES_GCM_encrypt_update_avx1_aesenc_gfmul_last:
- vaesenclast xmm8, xmm8, xmm9
- vmovdqa xmm0, xmm13
- vpxor xmm8, xmm8, xmm0
- vmovdqu OWORD PTR [r10+rdi], xmm8
- vpshufb xmm8, xmm8, OWORD PTR L_avx1_aes_gcm_bswap_mask
- add edi, 16
- vpxor xmm6, xmm6, xmm8
- cmp edi, r13d
- jl L_AES_GCM_encrypt_update_avx1_last_block_start
- L_AES_GCM_encrypt_update_avx1_last_block_ghash:
- ; ghash_gfmul_red_avx
- vpshufd xmm9, xmm5, 78
- vpshufd xmm10, xmm6, 78
- vpclmulqdq xmm11, xmm6, xmm5, 17
- vpclmulqdq xmm8, xmm6, xmm5, 0
- vpxor xmm9, xmm9, xmm5
- vpxor xmm10, xmm10, xmm6
- vpclmulqdq xmm9, xmm9, xmm10, 0
- vpxor xmm9, xmm9, xmm8
- vpxor xmm9, xmm9, xmm11
- vpslldq xmm10, xmm9, 8
- vpsrldq xmm9, xmm9, 8
- vpxor xmm8, xmm8, xmm10
- vpxor xmm6, xmm11, xmm9
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm6, xmm6, xmm14
- L_AES_GCM_encrypt_update_avx1_last_block_done:
- L_AES_GCM_encrypt_update_avx1_done_enc:
- vmovdqa OWORD PTR [r12], xmm6
- vzeroupper
- vmovdqu xmm6, OWORD PTR [rsp+160]
- vmovdqu xmm7, OWORD PTR [rsp+176]
- vmovdqu xmm8, OWORD PTR [rsp+192]
- vmovdqu xmm9, OWORD PTR [rsp+208]
- vmovdqu xmm10, OWORD PTR [rsp+224]
- vmovdqu xmm11, OWORD PTR [rsp+240]
- vmovdqu xmm12, OWORD PTR [rsp+256]
- vmovdqu xmm13, OWORD PTR [rsp+272]
- vmovdqu xmm14, OWORD PTR [rsp+288]
- vmovdqu xmm15, OWORD PTR [rsp+304]
- add rsp, 320
- pop rdi
- pop r15
- pop r14
- pop r12
- pop r13
- ret
- AES_GCM_encrypt_update_avx1 ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_encrypt_final_avx1 PROC
- ; Compute the final AES-GCM authentication tag (AVX1 path).
- ; Windows x64 ABI. NOTE(review): argument roles below are inferred from the
- ; register usage in this body - confirm against the C prototype:
- ;   rcx        -> rax : pointer to the running GHASH state X (16 bytes)
- ;   rdx        -> r9  : authentication-tag output buffer
- ;   r8d               : tag length in bytes (16 takes the fast store path)
- ;   r9d        -> r10d: ciphertext length in bytes
- ;   [rsp+64]   -> r11d: AAD length in bytes
- ;   [rsp+72]   -> r12 : pointer to the hash key H
- ;   [rsp+80]   -> r14 : pointer to E(K, counter0) (xored in at the end)
- ; Clobbers rax/rcx/rdx/r13 and xmm0-xmm13; xmm6-xmm13 are callee-saved on
- ; Win64 and are spilled/restored around the body.
- push r13
- push r12
- push r14
- mov rax, rcx
- mov r10d, r9d
- mov r9, rdx
- mov r11d, DWORD PTR [rsp+64]
- mov r12, QWORD PTR [rsp+72]
- mov r14, QWORD PTR [rsp+80]
- sub rsp, 144
- ; Spill Win64 callee-saved xmm registers used below.
- vmovdqu OWORD PTR [rsp+16], xmm6
- vmovdqu OWORD PTR [rsp+32], xmm7
- vmovdqu OWORD PTR [rsp+48], xmm8
- vmovdqu OWORD PTR [rsp+64], xmm9
- vmovdqu OWORD PTR [rsp+80], xmm10
- vmovdqu OWORD PTR [rsp+96], xmm11
- vmovdqu OWORD PTR [rsp+112], xmm12
- vmovdqu OWORD PTR [rsp+128], xmm13
- vmovdqa xmm4, OWORD PTR [rax]
- vmovdqa xmm5, OWORD PTR [r12]
- vmovdqa xmm6, OWORD PTR [r14]
- ; 128-bit left shift of H by one bit with conditional reduction:
- ; if the top bit of H was set, xor in the GCM reduction constant.
- ; (Converts H into the shifted form this implementation multiplies by.)
- vpsrlq xmm8, xmm5, 63
- vpsllq xmm7, xmm5, 1
- vpslldq xmm8, xmm8, 8
- vpor xmm7, xmm7, xmm8
- vpshufd xmm5, xmm5, 255
- vpsrad xmm5, xmm5, 31
- vpand xmm5, xmm5, OWORD PTR L_avx1_aes_gcm_mod2_128
- vpxor xmm5, xmm5, xmm7
- ; Build the GCM lengths block: byte counts scaled to bit counts (shl 3),
- ; packed as {cipher-length-bits, aad-length-bits} and folded into X.
- mov edx, r10d
- mov ecx, r11d
- shl rdx, 3
- shl rcx, 3
- vmovq xmm0, rdx
- vmovq xmm1, rcx
- vpunpcklqdq xmm0, xmm0, xmm1
- vpxor xmm4, xmm4, xmm0
- ; ghash_gfmul_red_avx
- ; X = (X * H) mod the GHASH polynomial, Karatsuba-style: three
- ; carry-less multiplies, then the shift-based reduction below.
- vpshufd xmm8, xmm5, 78
- vpshufd xmm9, xmm4, 78
- vpclmulqdq xmm10, xmm4, xmm5, 17
- vpclmulqdq xmm7, xmm4, xmm5, 0
- vpxor xmm8, xmm8, xmm5
- vpxor xmm9, xmm9, xmm4
- vpclmulqdq xmm8, xmm8, xmm9, 0
- vpxor xmm8, xmm8, xmm7
- vpxor xmm8, xmm8, xmm10
- vpslldq xmm9, xmm8, 8
- vpsrldq xmm8, xmm8, 8
- vpxor xmm7, xmm7, xmm9
- vpxor xmm4, xmm10, xmm8
- ; Reduction of the 256-bit product modulo x^128 + x^7 + x^2 + x + 1,
- ; implemented with the classic 31/30/25 left shifts and 1/2/7 right shifts.
- vpslld xmm11, xmm7, 31
- vpslld xmm12, xmm7, 30
- vpslld xmm13, xmm7, 25
- vpxor xmm11, xmm11, xmm12
- vpxor xmm11, xmm11, xmm13
- vpsrldq xmm12, xmm11, 4
- vpslldq xmm11, xmm11, 12
- vpxor xmm7, xmm7, xmm11
- vpsrld xmm13, xmm7, 1
- vpsrld xmm9, xmm7, 2
- vpsrld xmm8, xmm7, 7
- vpxor xmm13, xmm13, xmm9
- vpxor xmm13, xmm13, xmm8
- vpxor xmm13, xmm13, xmm12
- vpxor xmm13, xmm13, xmm7
- vpxor xmm4, xmm4, xmm13
- ; Byte-swap the GHASH result back to wire order and xor with the
- ; encrypted initial counter block to form the tag.
- vpshufb xmm4, xmm4, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm0, xmm4, xmm6
- cmp r8d, 16
- je L_AES_GCM_encrypt_final_avx1_store_tag_16
- ; Partial tag: spill to the stack and copy r8d bytes one at a time.
- xor rcx, rcx
- vmovdqu OWORD PTR [rsp], xmm0
- L_AES_GCM_encrypt_final_avx1_store_tag_loop:
- movzx r13d, BYTE PTR [rsp+rcx]
- mov BYTE PTR [r9+rcx], r13b
- inc ecx
- cmp ecx, r8d
- jne L_AES_GCM_encrypt_final_avx1_store_tag_loop
- jmp L_AES_GCM_encrypt_final_avx1_store_tag_done
- L_AES_GCM_encrypt_final_avx1_store_tag_16:
- ; Full 16-byte tag: single unaligned store.
- vmovdqu OWORD PTR [r9], xmm0
- L_AES_GCM_encrypt_final_avx1_store_tag_done:
- ; Restore callee-saved xmm registers and unwind.
- vzeroupper
- vmovdqu xmm6, OWORD PTR [rsp+16]
- vmovdqu xmm7, OWORD PTR [rsp+32]
- vmovdqu xmm8, OWORD PTR [rsp+48]
- vmovdqu xmm9, OWORD PTR [rsp+64]
- vmovdqu xmm10, OWORD PTR [rsp+80]
- vmovdqu xmm11, OWORD PTR [rsp+96]
- vmovdqu xmm12, OWORD PTR [rsp+112]
- vmovdqu xmm13, OWORD PTR [rsp+128]
- add rsp, 144
- pop r14
- pop r12
- pop r13
- ret
- AES_GCM_encrypt_final_avx1 ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_decrypt_update_avx1 PROC
- ; AES-GCM decrypt-update (AVX1): GHASH the ciphertext and AES-CTR
- ; decrypt it, 8 blocks (128 bytes) at a time with a 16-byte tail loop.
- ; Windows x64 ABI. NOTE(review): argument roles inferred from usage -
- ; confirm against the C prototype:
- ;   rcx        -> rax : AES round-key schedule
- ;   edx        -> r8d : number of AES rounds (11/13 tested -> 192/256-bit keys)
- ;   r8         -> r10 : plaintext output buffer
- ;   r9         -> r11 : ciphertext input buffer
- ;   [rsp+80]   -> r9d : input length in bytes
- ;   [rsp+88]   -> r12 : pointer to the running GHASH state X (in/out)
- ;   [rsp+96]   -> r14 : pointer to the hash key H
- ;   [rsp+104]  -> r15 : pointer to the counter block (in/out)
- ; Unlike the encrypt path, GHASH consumes the input (ciphertext).
- push r13
- push r12
- push r14
- push r15
- push rdi
- mov rax, rcx
- mov r10, r8
- mov r8d, edx
- mov r11, r9
- mov r9d, DWORD PTR [rsp+80]
- mov r12, QWORD PTR [rsp+88]
- mov r14, QWORD PTR [rsp+96]
- mov r15, QWORD PTR [rsp+104]
- sub rsp, 328
- ; Spill Win64 callee-saved xmm6-xmm15.
- vmovdqu OWORD PTR [rsp+168], xmm6
- vmovdqu OWORD PTR [rsp+184], xmm7
- vmovdqu OWORD PTR [rsp+200], xmm8
- vmovdqu OWORD PTR [rsp+216], xmm9
- vmovdqu OWORD PTR [rsp+232], xmm10
- vmovdqu OWORD PTR [rsp+248], xmm11
- vmovdqu OWORD PTR [rsp+264], xmm12
- vmovdqu OWORD PTR [rsp+280], xmm13
- vmovdqu OWORD PTR [rsp+296], xmm14
- vmovdqu OWORD PTR [rsp+312], xmm15
- ; xmm6 = current GHASH state X; xmm5 = H shifted left one bit with
- ; conditional reduction (the representation the multiplies below use).
- vmovdqa xmm6, OWORD PTR [r12]
- vmovdqa xmm5, OWORD PTR [r14]
- vpsrlq xmm9, xmm5, 63
- vpsllq xmm8, xmm5, 1
- vpslldq xmm9, xmm9, 8
- vpor xmm8, xmm8, xmm9
- vpshufd xmm5, xmm5, 255
- vpsrad xmm5, xmm5, 31
- vpand xmm5, xmm5, OWORD PTR L_avx1_aes_gcm_mod2_128
- vpxor xmm5, xmm5, xmm8
- ; edi = bytes processed so far; take the 8-block path only if >= 128.
- xor edi, edi
- cmp r9d, 128
- mov r13d, r9d
- jl L_AES_GCM_decrypt_update_avx1_done_128
- and r13d, 4294967168
- vmovdqa xmm2, xmm6
- ; Precompute H^1..H^8 into [rsp+0..112] for the 8-way GHASH below.
- ; Even powers are squarings; odd powers use ghash_gfmul_red_avx
- ; (Karatsuba multiply + shift-based reduction).
- ; H ^ 1
- vmovdqu OWORD PTR [rsp], xmm5
- ; H ^ 2
- vpclmulqdq xmm8, xmm5, xmm5, 0
- vpclmulqdq xmm0, xmm5, xmm5, 17
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm0, xmm0, xmm14
- vmovdqu OWORD PTR [rsp+16], xmm0
- ; H ^ 3
- ; ghash_gfmul_red_avx
- vpshufd xmm9, xmm5, 78
- vpshufd xmm10, xmm0, 78
- vpclmulqdq xmm11, xmm0, xmm5, 17
- vpclmulqdq xmm8, xmm0, xmm5, 0
- vpxor xmm9, xmm9, xmm5
- vpxor xmm10, xmm10, xmm0
- vpclmulqdq xmm9, xmm9, xmm10, 0
- vpxor xmm9, xmm9, xmm8
- vpxor xmm9, xmm9, xmm11
- vpslldq xmm10, xmm9, 8
- vpsrldq xmm9, xmm9, 8
- vpxor xmm8, xmm8, xmm10
- vpxor xmm1, xmm11, xmm9
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm1, xmm1, xmm14
- vmovdqu OWORD PTR [rsp+32], xmm1
- ; H ^ 4
- vpclmulqdq xmm8, xmm0, xmm0, 0
- vpclmulqdq xmm3, xmm0, xmm0, 17
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm3, xmm3, xmm14
- vmovdqu OWORD PTR [rsp+48], xmm3
- ; H ^ 5
- ; ghash_gfmul_red_avx
- vpshufd xmm9, xmm0, 78
- vpshufd xmm10, xmm1, 78
- vpclmulqdq xmm11, xmm1, xmm0, 17
- vpclmulqdq xmm8, xmm1, xmm0, 0
- vpxor xmm9, xmm9, xmm0
- vpxor xmm10, xmm10, xmm1
- vpclmulqdq xmm9, xmm9, xmm10, 0
- vpxor xmm9, xmm9, xmm8
- vpxor xmm9, xmm9, xmm11
- vpslldq xmm10, xmm9, 8
- vpsrldq xmm9, xmm9, 8
- vpxor xmm8, xmm8, xmm10
- vpxor xmm7, xmm11, xmm9
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm7, xmm7, xmm14
- vmovdqu OWORD PTR [rsp+64], xmm7
- ; H ^ 6
- vpclmulqdq xmm8, xmm1, xmm1, 0
- vpclmulqdq xmm7, xmm1, xmm1, 17
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm7, xmm7, xmm14
- vmovdqu OWORD PTR [rsp+80], xmm7
- ; H ^ 7
- ; ghash_gfmul_red_avx
- vpshufd xmm9, xmm1, 78
- vpshufd xmm10, xmm3, 78
- vpclmulqdq xmm11, xmm3, xmm1, 17
- vpclmulqdq xmm8, xmm3, xmm1, 0
- vpxor xmm9, xmm9, xmm1
- vpxor xmm10, xmm10, xmm3
- vpclmulqdq xmm9, xmm9, xmm10, 0
- vpxor xmm9, xmm9, xmm8
- vpxor xmm9, xmm9, xmm11
- vpslldq xmm10, xmm9, 8
- vpsrldq xmm9, xmm9, 8
- vpxor xmm8, xmm8, xmm10
- vpxor xmm7, xmm11, xmm9
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm7, xmm7, xmm14
- vmovdqu OWORD PTR [rsp+96], xmm7
- ; H ^ 8
- vpclmulqdq xmm8, xmm3, xmm3, 0
- vpclmulqdq xmm7, xmm3, xmm3, 17
- vpslld xmm12, xmm8, 31
- vpslld xmm13, xmm8, 30
- vpslld xmm14, xmm8, 25
- vpxor xmm12, xmm12, xmm13
- vpxor xmm12, xmm12, xmm14
- vpsrldq xmm13, xmm12, 4
- vpslldq xmm12, xmm12, 12
- vpxor xmm8, xmm8, xmm12
- vpsrld xmm14, xmm8, 1
- vpsrld xmm10, xmm8, 2
- vpsrld xmm9, xmm8, 7
- vpxor xmm14, xmm14, xmm10
- vpxor xmm14, xmm14, xmm9
- vpxor xmm14, xmm14, xmm13
- vpxor xmm14, xmm14, xmm8
- vpxor xmm7, xmm7, xmm14
- vmovdqu OWORD PTR [rsp+112], xmm7
- ; Main loop: each iteration decrypts 128 bytes. Eight counter blocks are
- ; generated and AES rounds are interleaved with the 8-way GHASH of the
- ; ciphertext (multiplying block i by H^(8-i) and accumulating).
- L_AES_GCM_decrypt_update_avx1_ghash_128:
- lea rcx, QWORD PTR [r11+rdi]
- lea rdx, QWORD PTR [r10+rdi]
- ; Build counter blocks ctr+0..ctr+7 (byte-swapped to AES input order)
- ; and store ctr+8 back for the next iteration.
- vmovdqu xmm0, OWORD PTR [r15]
- vmovdqa xmm1, OWORD PTR L_avx1_aes_gcm_bswap_epi64
- vpshufb xmm8, xmm0, xmm1
- vpaddd xmm9, xmm0, OWORD PTR L_avx1_aes_gcm_one
- vpshufb xmm9, xmm9, xmm1
- vpaddd xmm10, xmm0, OWORD PTR L_avx1_aes_gcm_two
- vpshufb xmm10, xmm10, xmm1
- vpaddd xmm11, xmm0, OWORD PTR L_avx1_aes_gcm_three
- vpshufb xmm11, xmm11, xmm1
- vpaddd xmm12, xmm0, OWORD PTR L_avx1_aes_gcm_four
- vpshufb xmm12, xmm12, xmm1
- vpaddd xmm13, xmm0, OWORD PTR L_avx1_aes_gcm_five
- vpshufb xmm13, xmm13, xmm1
- vpaddd xmm14, xmm0, OWORD PTR L_avx1_aes_gcm_six
- vpshufb xmm14, xmm14, xmm1
- vpaddd xmm15, xmm0, OWORD PTR L_avx1_aes_gcm_seven
- vpshufb xmm15, xmm15, xmm1
- vpaddd xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_eight
- ; Round 0: xor all eight counter blocks with the first round key.
- vmovdqa xmm7, OWORD PTR [rax]
- vmovdqu OWORD PTR [r15], xmm0
- vpxor xmm8, xmm8, xmm7
- vpxor xmm9, xmm9, xmm7
- vpxor xmm10, xmm10, xmm7
- vpxor xmm11, xmm11, xmm7
- vpxor xmm12, xmm12, xmm7
- vpxor xmm13, xmm13, xmm7
- vpxor xmm14, xmm14, xmm7
- vpxor xmm15, xmm15, xmm7
- ; GHASH block 0 (ciphertext * H^8); running state X (xmm2) is folded in.
- vmovdqu xmm7, OWORD PTR [rsp+112]
- vmovdqu xmm0, OWORD PTR [rcx]
- vaesenc xmm8, xmm8, [rax+16]
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm0, xmm0, xmm2
- vpshufd xmm1, xmm7, 78
- vpshufd xmm5, xmm0, 78
- vpxor xmm1, xmm1, xmm7
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm3, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [rax+16]
- vaesenc xmm10, xmm10, [rax+16]
- vpclmulqdq xmm2, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [rax+16]
- vaesenc xmm12, xmm12, [rax+16]
- vpclmulqdq xmm1, xmm1, xmm5, 0
- vaesenc xmm13, xmm13, [rax+16]
- vaesenc xmm14, xmm14, [rax+16]
- vaesenc xmm15, xmm15, [rax+16]
- vpxor xmm1, xmm1, xmm2
- vpxor xmm1, xmm1, xmm3
- ; GHASH block 1 (* H^7), interleaved with AES round 2.
- vmovdqu xmm7, OWORD PTR [rsp+96]
- vmovdqu xmm0, OWORD PTR [rcx+16]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [rax+32]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [rax+32]
- vaesenc xmm10, xmm10, [rax+32]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [rax+32]
- vaesenc xmm12, xmm12, [rax+32]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [rax+32]
- vaesenc xmm14, xmm14, [rax+32]
- vaesenc xmm15, xmm15, [rax+32]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- ; GHASH block 2 (* H^6), AES round 3.
- vmovdqu xmm7, OWORD PTR [rsp+80]
- vmovdqu xmm0, OWORD PTR [rcx+32]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [rax+48]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [rax+48]
- vaesenc xmm10, xmm10, [rax+48]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [rax+48]
- vaesenc xmm12, xmm12, [rax+48]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [rax+48]
- vaesenc xmm14, xmm14, [rax+48]
- vaesenc xmm15, xmm15, [rax+48]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- ; GHASH block 3 (* H^5), AES round 4.
- vmovdqu xmm7, OWORD PTR [rsp+64]
- vmovdqu xmm0, OWORD PTR [rcx+48]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [rax+64]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [rax+64]
- vaesenc xmm10, xmm10, [rax+64]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [rax+64]
- vaesenc xmm12, xmm12, [rax+64]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [rax+64]
- vaesenc xmm14, xmm14, [rax+64]
- vaesenc xmm15, xmm15, [rax+64]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- ; GHASH block 4 (* H^4), AES round 5.
- vmovdqu xmm7, OWORD PTR [rsp+48]
- vmovdqu xmm0, OWORD PTR [rcx+64]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [rax+80]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [rax+80]
- vaesenc xmm10, xmm10, [rax+80]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [rax+80]
- vaesenc xmm12, xmm12, [rax+80]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [rax+80]
- vaesenc xmm14, xmm14, [rax+80]
- vaesenc xmm15, xmm15, [rax+80]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- ; GHASH block 5 (* H^3), AES round 6.
- vmovdqu xmm7, OWORD PTR [rsp+32]
- vmovdqu xmm0, OWORD PTR [rcx+80]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [rax+96]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [rax+96]
- vaesenc xmm10, xmm10, [rax+96]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [rax+96]
- vaesenc xmm12, xmm12, [rax+96]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [rax+96]
- vaesenc xmm14, xmm14, [rax+96]
- vaesenc xmm15, xmm15, [rax+96]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- ; GHASH block 6 (* H^2), AES round 7.
- vmovdqu xmm7, OWORD PTR [rsp+16]
- vmovdqu xmm0, OWORD PTR [rcx+96]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [rax+112]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [rax+112]
- vaesenc xmm10, xmm10, [rax+112]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [rax+112]
- vaesenc xmm12, xmm12, [rax+112]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [rax+112]
- vaesenc xmm14, xmm14, [rax+112]
- vaesenc xmm15, xmm15, [rax+112]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- ; GHASH block 7 (* H^1), AES round 8.
- vmovdqu xmm7, OWORD PTR [rsp]
- vmovdqu xmm0, OWORD PTR [rcx+112]
- vpshufd xmm4, xmm7, 78
- vpshufb xmm0, xmm0, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vaesenc xmm8, xmm8, [rax+128]
- vpxor xmm4, xmm4, xmm7
- vpshufd xmm5, xmm0, 78
- vpxor xmm5, xmm5, xmm0
- vpclmulqdq xmm6, xmm0, xmm7, 17
- vaesenc xmm9, xmm9, [rax+128]
- vaesenc xmm10, xmm10, [rax+128]
- vpclmulqdq xmm7, xmm0, xmm7, 0
- vaesenc xmm11, xmm11, [rax+128]
- vaesenc xmm12, xmm12, [rax+128]
- vpclmulqdq xmm4, xmm4, xmm5, 0
- vaesenc xmm13, xmm13, [rax+128]
- vaesenc xmm14, xmm14, [rax+128]
- vaesenc xmm15, xmm15, [rax+128]
- vpxor xmm1, xmm1, xmm7
- vpxor xmm2, xmm2, xmm7
- vpxor xmm1, xmm1, xmm6
- vpxor xmm3, xmm3, xmm6
- vpxor xmm1, xmm1, xmm4
- ; Fold the middle Karatsuba term and reduce the 256-bit GHASH
- ; accumulator (xmm2:xmm3) modulo the GHASH polynomial, interleaved
- ; with AES round 9. Result left in xmm2.
- vpslldq xmm5, xmm1, 8
- vpsrldq xmm1, xmm1, 8
- vaesenc xmm8, xmm8, [rax+144]
- vpxor xmm2, xmm2, xmm5
- vpxor xmm3, xmm3, xmm1
- vaesenc xmm9, xmm9, [rax+144]
- vpslld xmm7, xmm2, 31
- vpslld xmm4, xmm2, 30
- vpslld xmm5, xmm2, 25
- vaesenc xmm10, xmm10, [rax+144]
- vpxor xmm7, xmm7, xmm4
- vpxor xmm7, xmm7, xmm5
- vaesenc xmm11, xmm11, [rax+144]
- vpsrldq xmm4, xmm7, 4
- vpslldq xmm7, xmm7, 12
- vaesenc xmm12, xmm12, [rax+144]
- vpxor xmm2, xmm2, xmm7
- vpsrld xmm5, xmm2, 1
- vaesenc xmm13, xmm13, [rax+144]
- vpsrld xmm1, xmm2, 2
- vpsrld xmm0, xmm2, 7
- vaesenc xmm14, xmm14, [rax+144]
- vpxor xmm5, xmm5, xmm1
- vpxor xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, [rax+144]
- vpxor xmm5, xmm5, xmm4
- vpxor xmm2, xmm2, xmm5
- vpxor xmm2, xmm2, xmm3
- ; Extra AES rounds: fewer than 11 round keys means 10 rounds (128-bit
- ; key) - skip; fewer than 13 means 12 rounds (192-bit); else 14 (256-bit).
- cmp r8d, 11
- vmovdqa xmm7, OWORD PTR [rax+160]
- jl L_AES_GCM_decrypt_update_avx1_aesenc_128_ghash_avx_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [rax+176]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- cmp r8d, 13
- vmovdqa xmm7, OWORD PTR [rax+192]
- jl L_AES_GCM_decrypt_update_avx1_aesenc_128_ghash_avx_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [rax+208]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqa xmm7, OWORD PTR [rax+224]
- L_AES_GCM_decrypt_update_avx1_aesenc_128_ghash_avx_done:
- ; Final round, then xor the keystream with the ciphertext and store
- ; the 128 bytes of plaintext, two blocks at a time.
- vaesenclast xmm8, xmm8, xmm7
- vaesenclast xmm9, xmm9, xmm7
- vmovdqu xmm0, OWORD PTR [rcx]
- vmovdqu xmm1, OWORD PTR [rcx+16]
- vpxor xmm8, xmm8, xmm0
- vpxor xmm9, xmm9, xmm1
- vmovdqu OWORD PTR [rdx], xmm8
- vmovdqu OWORD PTR [rdx+16], xmm9
- vaesenclast xmm10, xmm10, xmm7
- vaesenclast xmm11, xmm11, xmm7
- vmovdqu xmm0, OWORD PTR [rcx+32]
- vmovdqu xmm1, OWORD PTR [rcx+48]
- vpxor xmm10, xmm10, xmm0
- vpxor xmm11, xmm11, xmm1
- vmovdqu OWORD PTR [rdx+32], xmm10
- vmovdqu OWORD PTR [rdx+48], xmm11
- vaesenclast xmm12, xmm12, xmm7
- vaesenclast xmm13, xmm13, xmm7
- vmovdqu xmm0, OWORD PTR [rcx+64]
- vmovdqu xmm1, OWORD PTR [rcx+80]
- vpxor xmm12, xmm12, xmm0
- vpxor xmm13, xmm13, xmm1
- vmovdqu OWORD PTR [rdx+64], xmm12
- vmovdqu OWORD PTR [rdx+80], xmm13
- vaesenclast xmm14, xmm14, xmm7
- vaesenclast xmm15, xmm15, xmm7
- vmovdqu xmm0, OWORD PTR [rcx+96]
- vmovdqu xmm1, OWORD PTR [rcx+112]
- vpxor xmm14, xmm14, xmm0
- vpxor xmm15, xmm15, xmm1
- vmovdqu OWORD PTR [rdx+96], xmm14
- vmovdqu OWORD PTR [rdx+112], xmm15
- add edi, 128
- cmp edi, r13d
- jl L_AES_GCM_decrypt_update_avx1_ghash_128
- ; Loop done: xmm6 = GHASH state, xmm5 = H (shifted form) for the tail.
- vmovdqa xmm6, xmm2
- vmovdqu xmm5, OWORD PTR [rsp]
- L_AES_GCM_decrypt_update_avx1_done_128:
- ; Tail: process remaining whole 16-byte blocks one at a time.
- mov edx, r9d
- cmp edi, edx
- jge L_AES_GCM_decrypt_update_avx1_done_dec
- mov r13d, r9d
- and r13d, 4294967280
- cmp edi, r13d
- jge L_AES_GCM_decrypt_update_avx1_last_block_done
- L_AES_GCM_decrypt_update_avx1_last_block_start:
- ; GHASH the ciphertext block into X (xmm6 := (X ^ C) * H, using the
- ; mod2_128-constant reduction), interleaved with AES-CTR of the counter.
- vmovdqu xmm13, OWORD PTR [r11+rdi]
- vmovdqa xmm0, xmm5
- vpshufb xmm1, xmm13, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm1, xmm1, xmm6
- vmovdqu xmm9, OWORD PTR [r15]
- vpshufb xmm8, xmm9, OWORD PTR L_avx1_aes_gcm_bswap_epi64
- vpaddd xmm9, xmm9, OWORD PTR L_avx1_aes_gcm_one
- vmovdqu OWORD PTR [r15], xmm9
- vpxor xmm8, xmm8, [rax]
- vpclmulqdq xmm10, xmm1, xmm0, 16
- vaesenc xmm8, xmm8, [rax+16]
- vaesenc xmm8, xmm8, [rax+32]
- vpclmulqdq xmm11, xmm1, xmm0, 1
- vaesenc xmm8, xmm8, [rax+48]
- vaesenc xmm8, xmm8, [rax+64]
- vpclmulqdq xmm12, xmm1, xmm0, 0
- vaesenc xmm8, xmm8, [rax+80]
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vaesenc xmm8, xmm8, [rax+96]
- vpxor xmm10, xmm10, xmm11
- vpslldq xmm2, xmm10, 8
- vpsrldq xmm10, xmm10, 8
- vaesenc xmm8, xmm8, [rax+112]
- vpxor xmm2, xmm2, xmm12
- vpxor xmm3, xmm1, xmm10
- vmovdqa xmm0, OWORD PTR L_avx1_aes_gcm_mod2_128
- vpclmulqdq xmm11, xmm2, xmm0, 16
- vaesenc xmm8, xmm8, [rax+128]
- vpshufd xmm10, xmm2, 78
- vpxor xmm10, xmm10, xmm11
- vpclmulqdq xmm11, xmm10, xmm0, 16
- vaesenc xmm8, xmm8, [rax+144]
- vpshufd xmm10, xmm10, 78
- vpxor xmm10, xmm10, xmm11
- vpxor xmm6, xmm10, xmm3
- ; Key-size dependent extra rounds, as in the main loop.
- cmp r8d, 11
- vmovdqa xmm9, OWORD PTR [rax+160]
- jl L_AES_GCM_decrypt_update_avx1_aesenc_gfmul_last
- vaesenc xmm8, xmm8, xmm9
- vaesenc xmm8, xmm8, [rax+176]
- cmp r8d, 13
- vmovdqa xmm9, OWORD PTR [rax+192]
- jl L_AES_GCM_decrypt_update_avx1_aesenc_gfmul_last
- vaesenc xmm8, xmm8, xmm9
- vaesenc xmm8, xmm8, [rax+208]
- vmovdqa xmm9, OWORD PTR [rax+224]
- L_AES_GCM_decrypt_update_avx1_aesenc_gfmul_last:
- ; Plaintext = keystream ^ ciphertext.
- vaesenclast xmm8, xmm8, xmm9
- vmovdqa xmm0, xmm13
- vpxor xmm8, xmm8, xmm0
- vmovdqu OWORD PTR [r10+rdi], xmm8
- add edi, 16
- cmp edi, r13d
- jl L_AES_GCM_decrypt_update_avx1_last_block_start
- L_AES_GCM_decrypt_update_avx1_last_block_done:
- L_AES_GCM_decrypt_update_avx1_done_dec:
- ; Write back the GHASH state, restore callee-saved xmm regs, unwind.
- vmovdqa OWORD PTR [r12], xmm6
- vzeroupper
- vmovdqu xmm6, OWORD PTR [rsp+168]
- vmovdqu xmm7, OWORD PTR [rsp+184]
- vmovdqu xmm8, OWORD PTR [rsp+200]
- vmovdqu xmm9, OWORD PTR [rsp+216]
- vmovdqu xmm10, OWORD PTR [rsp+232]
- vmovdqu xmm11, OWORD PTR [rsp+248]
- vmovdqu xmm12, OWORD PTR [rsp+264]
- vmovdqu xmm13, OWORD PTR [rsp+280]
- vmovdqu xmm14, OWORD PTR [rsp+296]
- vmovdqu xmm15, OWORD PTR [rsp+312]
- add rsp, 328
- pop rdi
- pop r15
- pop r14
- pop r12
- pop r13
- ret
- AES_GCM_decrypt_update_avx1 ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- ; AES_GCM_decrypt_final_avx1 -- finish GCM decryption: fold the AAD and
- ; ciphertext bit-lengths into the GHASH state, derive the authentication
- ; tag, and compare it with the caller-supplied tag without early exit.
- ; Windows x64 ABI (integer args rcx, rdx, r8, r9; remainder on stack).
- ; Register use observed below (presumed meanings -- confirm against the
- ; C prototype):
- ;   rcx       -> GHASH state X (16 bytes, loaded into xmm6)
- ;   rdx       -> caller-supplied authentication tag to check
- ;   r8d       =  tag size in bytes (16 takes the SIMD fast path)
- ;   r9d       =  ciphertext length in bytes
- ;   [rsp+80]  =  AAD length in bytes
- ;   [rsp+88]  -> hash key H (loaded into xmm5)
- ;   [rsp+96]  -> encrypted initial counter block E(K, Y0) (xmm15)
- ;   [rsp+104] -> int* result: set to 1 on tag match, 0 on mismatch
- AES_GCM_decrypt_final_avx1 PROC
- ; Save callee-saved integer registers (Windows x64).
- push r13
- push r12
- push r14
- push rbp
- push r15
- ; Move arguments out of the argument registers before they are reused.
- mov rax, rcx
- mov r10d, r9d
- mov r9, rdx
- ; Stack offsets account for the five pushes above (5*8 + return addr).
- mov r11d, DWORD PTR [rsp+80]
- mov r12, QWORD PTR [rsp+88]
- mov r14, QWORD PTR [rsp+96]
- mov rbp, QWORD PTR [rsp+104]
- sub rsp, 160
- ; xmm6-xmm15 are callee-saved on Windows x64; xmm14 is not used by this
- ; function, so only the nine registers actually clobbered are saved.
- vmovdqu OWORD PTR [rsp+16], xmm6
- vmovdqu OWORD PTR [rsp+32], xmm7
- vmovdqu OWORD PTR [rsp+48], xmm8
- vmovdqu OWORD PTR [rsp+64], xmm9
- vmovdqu OWORD PTR [rsp+80], xmm10
- vmovdqu OWORD PTR [rsp+96], xmm11
- vmovdqu OWORD PTR [rsp+112], xmm12
- vmovdqu OWORD PTR [rsp+128], xmm13
- vmovdqu OWORD PTR [rsp+144], xmm15
- ; xmm6 = GHASH state X, xmm5 = H, xmm15 = E(K, Y0).
- vmovdqa xmm6, OWORD PTR [rax]
- vmovdqa xmm5, OWORD PTR [r12]
- vmovdqa xmm15, OWORD PTR [r14]
- ; H = H * x in GF(2^128): shift the 128-bit value left by one bit
- ; (carry moved between the 64-bit halves via vpsrlq/vpslldq/vpor) and,
- ; if the original top bit was set, xor in the reduction constant
- ; (vpshufd/vpsrad builds an all-ones/all-zeros mask from that bit).
- vpsrlq xmm8, xmm5, 63
- vpsllq xmm7, xmm5, 1
- vpslldq xmm8, xmm8, 8
- vpor xmm7, xmm7, xmm8
- vpshufd xmm5, xmm5, 255
- vpsrad xmm5, xmm5, 31
- vpand xmm5, xmm5, OWORD PTR L_avx1_aes_gcm_mod2_128
- vpxor xmm5, xmm5, xmm7
- ; Build the length block: low qword = ciphertext length in bits,
- ; high qword = AAD length in bits; xor it into the GHASH state.
- mov edx, r10d
- mov ecx, r11d
- shl rdx, 3
- shl rcx, 3
- vmovq xmm0, rdx
- vmovq xmm1, rcx
- vpunpcklqdq xmm0, xmm0, xmm1
- vpxor xmm6, xmm6, xmm0
- ; ghash_gfmul_red_avx
- ; X = (X * H) mod poly: Karatsuba-style carry-less multiply
- ; (three vpclmulqdq) followed by shift-based reduction.
- vpshufd xmm8, xmm5, 78
- vpshufd xmm9, xmm6, 78
- vpclmulqdq xmm10, xmm6, xmm5, 17
- vpclmulqdq xmm7, xmm6, xmm5, 0
- vpxor xmm8, xmm8, xmm5
- vpxor xmm9, xmm9, xmm6
- vpclmulqdq xmm8, xmm8, xmm9, 0
- vpxor xmm8, xmm8, xmm7
- vpxor xmm8, xmm8, xmm10
- vpslldq xmm9, xmm8, 8
- vpsrldq xmm8, xmm8, 8
- vpxor xmm7, xmm7, xmm9
- vpxor xmm6, xmm10, xmm8
- ; Reduction of the 256-bit product: multiply the low half by the
- ; polynomial via shifts (31/30/25 left, then 1/2/7 right) and fold.
- vpslld xmm11, xmm7, 31
- vpslld xmm12, xmm7, 30
- vpslld xmm13, xmm7, 25
- vpxor xmm11, xmm11, xmm12
- vpxor xmm11, xmm11, xmm13
- vpsrldq xmm12, xmm11, 4
- vpslldq xmm11, xmm11, 12
- vpxor xmm7, xmm7, xmm11
- vpsrld xmm13, xmm7, 1
- vpsrld xmm9, xmm7, 2
- vpsrld xmm8, xmm7, 7
- vpxor xmm13, xmm13, xmm9
- vpxor xmm13, xmm13, xmm8
- vpxor xmm13, xmm13, xmm12
- vpxor xmm13, xmm13, xmm7
- vpxor xmm6, xmm6, xmm13
- ; Convert X to big-endian byte order and xor with E(K, Y0):
- ; xmm0 = computed authentication tag T.
- vpshufb xmm6, xmm6, OWORD PTR L_avx1_aes_gcm_bswap_mask
- vpxor xmm0, xmm6, xmm15
- cmp r8d, 16
- je L_AES_GCM_decrypt_final_avx1_cmp_tag_16
- ; Partial tag (< 16 bytes): spill T to the stack and compare byte by
- ; byte, OR-accumulating differences in r15b so the loop always runs
- ; the full tag length (no data-dependent early exit).
- sub rsp, 16
- xor rcx, rcx
- xor r15, r15
- vmovdqu OWORD PTR [rsp], xmm0
- L_AES_GCM_decrypt_final_avx1_cmp_tag_loop:
- movzx r13d, BYTE PTR [rsp+rcx]
- xor r13b, BYTE PTR [r9+rcx]
- or r15b, r13b
- inc ecx
- cmp ecx, r8d
- jne L_AES_GCM_decrypt_final_avx1_cmp_tag_loop
- ; r15 == 0 iff every byte matched; r15b becomes the 0/1 result.
- cmp r15, 0
- sete r15b
- add rsp, 16
- ; Clear scratch register (presumably to avoid leaving tag-derived
- ; data behind -- confirm intent).
- xor rcx, rcx
- jmp L_AES_GCM_decrypt_final_avx1_cmp_tag_done
- L_AES_GCM_decrypt_final_avx1_cmp_tag_16:
- ; Full 16-byte tag: single SIMD compare; all 16 byte-equal flags must
- ; be set in the movemask.
- vmovdqu xmm1, OWORD PTR [r9]
- vpcmpeqb xmm0, xmm0, xmm1
- vpmovmskb rdx, xmm0
- ; edx == 0xFFFF (all bytes equal) => result 1, else result 0
- xor r15d, r15d
- cmp edx, 65535
- sete r15b
- L_AES_GCM_decrypt_final_avx1_cmp_tag_done:
- ; Publish the 0/1 comparison result to the caller's int.
- mov DWORD PTR [rbp], r15d
- vzeroupper
- ; Restore callee-saved xmm registers and the stack frame.
- vmovdqu xmm6, OWORD PTR [rsp+16]
- vmovdqu xmm7, OWORD PTR [rsp+32]
- vmovdqu xmm8, OWORD PTR [rsp+48]
- vmovdqu xmm9, OWORD PTR [rsp+64]
- vmovdqu xmm10, OWORD PTR [rsp+80]
- vmovdqu xmm11, OWORD PTR [rsp+96]
- vmovdqu xmm12, OWORD PTR [rsp+112]
- vmovdqu xmm13, OWORD PTR [rsp+128]
- vmovdqu xmm15, OWORD PTR [rsp+144]
- add rsp, 160
- pop r15
- pop rbp
- pop r14
- pop r12
- pop r13
- ret
- AES_GCM_decrypt_final_avx1 ENDP
- _text ENDS
- ENDIF
- IFDEF HAVE_INTEL_AVX2
- ; Constant tables for the AVX2 AES-GCM implementation. Each constant is
- ; one 16-byte value (two little-endian QWORDs: low half first) followed
- ; by a pointer to it. The small integers 1..8 sit in the HIGH 64-bit
- ; lane and are added with vpaddd to step the counter block, which this
- ; code keeps in byte-swapped form.
- _DATA SEGMENT
- ALIGN 16
- L_avx2_aes_gcm_one QWORD 0, 1
- ptr_L_avx2_aes_gcm_one QWORD L_avx2_aes_gcm_one
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_avx2_aes_gcm_two QWORD 0, 2
- ptr_L_avx2_aes_gcm_two QWORD L_avx2_aes_gcm_two
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_avx2_aes_gcm_three QWORD 0, 3
- ptr_L_avx2_aes_gcm_three QWORD L_avx2_aes_gcm_three
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_avx2_aes_gcm_four QWORD 0, 4
- ptr_L_avx2_aes_gcm_four QWORD L_avx2_aes_gcm_four
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_avx2_aes_gcm_five QWORD 0, 5
- ptr_L_avx2_aes_gcm_five QWORD L_avx2_aes_gcm_five
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_avx2_aes_gcm_six QWORD 0, 6
- ptr_L_avx2_aes_gcm_six QWORD L_avx2_aes_gcm_six
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_avx2_aes_gcm_seven QWORD 0, 7
- ptr_L_avx2_aes_gcm_seven QWORD L_avx2_aes_gcm_seven
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- L_avx2_aes_gcm_eight QWORD 0, 8
- ptr_L_avx2_aes_gcm_eight QWORD L_avx2_aes_gcm_eight
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- ; The value 1 already in byte-swapped (big-endian) form:
- ; high qword = 0x0100000000000000.
- L_avx2_aes_gcm_bswap_one QWORD 0, 72057594037927936
- ptr_L_avx2_aes_gcm_bswap_one QWORD L_avx2_aes_gcm_bswap_one
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- ; vpshufb mask 0x0001020304050607 / 0x08090A0B0C0D0E0F: reverses the
- ; byte order within each 64-bit half independently.
- L_avx2_aes_gcm_bswap_epi64 QWORD 283686952306183, 579005069656919567
- ptr_L_avx2_aes_gcm_bswap_epi64 QWORD L_avx2_aes_gcm_bswap_epi64
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- ; vpshufb mask reversing all 16 bytes: full 128-bit endianness swap
- ; used to move blocks between memory order and GHASH bit order.
- L_avx2_aes_gcm_bswap_mask QWORD 579005069656919567, 283686952306183
- ptr_L_avx2_aes_gcm_bswap_mask QWORD L_avx2_aes_gcm_bswap_mask
- _DATA ENDS
- _DATA SEGMENT
- ALIGN 16
- ; GHASH reduction constant (high qword = 0xC200000000000000, low = 1),
- ; used with vpclmulqdq to reduce 256-bit products modulo the GCM
- ; polynomial x^128 + x^7 + x^2 + x + 1.
- L_avx2_aes_gcm_mod2_128 QWORD 1, 13979173243358019584
- ptr_L_avx2_aes_gcm_mod2_128 QWORD L_avx2_aes_gcm_mod2_128
- _DATA ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_encrypt_avx2 PROC
- push r13
- push rdi
- push r12
- push r15
- push rbx
- push r14
- push rsi
- mov rdi, rcx
- mov r12, r8
- mov rax, r9
- mov r15, QWORD PTR [rsp+96]
- mov r8, rdx
- mov r10d, DWORD PTR [rsp+104]
- mov r11d, DWORD PTR [rsp+112]
- mov ebx, DWORD PTR [rsp+120]
- mov r14d, DWORD PTR [rsp+128]
- mov rsi, QWORD PTR [rsp+136]
- mov r9d, DWORD PTR [rsp+144]
- sub rsp, 320
- vmovdqu OWORD PTR [rsp+160], xmm6
- vmovdqu OWORD PTR [rsp+176], xmm7
- vmovdqu OWORD PTR [rsp+192], xmm8
- vmovdqu OWORD PTR [rsp+208], xmm9
- vmovdqu OWORD PTR [rsp+224], xmm10
- vmovdqu OWORD PTR [rsp+240], xmm11
- vmovdqu OWORD PTR [rsp+256], xmm12
- vmovdqu OWORD PTR [rsp+272], xmm13
- vmovdqu OWORD PTR [rsp+288], xmm14
- vmovdqu OWORD PTR [rsp+304], xmm15
- vpxor xmm4, xmm4, xmm4
- vpxor xmm6, xmm6, xmm6
- mov edx, ebx
- cmp edx, 12
- je L_AES_GCM_encrypt_avx2_iv_12
- ; Calculate values when IV is not 12 bytes
- ; H = Encrypt X(=0)
- vmovdqu xmm5, OWORD PTR [rsi]
- vaesenc xmm5, xmm5, [rsi+16]
- vaesenc xmm5, xmm5, [rsi+32]
- vaesenc xmm5, xmm5, [rsi+48]
- vaesenc xmm5, xmm5, [rsi+64]
- vaesenc xmm5, xmm5, [rsi+80]
- vaesenc xmm5, xmm5, [rsi+96]
- vaesenc xmm5, xmm5, [rsi+112]
- vaesenc xmm5, xmm5, [rsi+128]
- vaesenc xmm5, xmm5, [rsi+144]
- cmp r9d, 11
- vmovdqu xmm0, OWORD PTR [rsi+160]
- jl L_AES_GCM_encrypt_avx2_calc_iv_1_aesenc_avx_last
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm5, xmm5, [rsi+176]
- cmp r9d, 13
- vmovdqu xmm0, OWORD PTR [rsi+192]
- jl L_AES_GCM_encrypt_avx2_calc_iv_1_aesenc_avx_last
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm5, xmm5, [rsi+208]
- vmovdqu xmm0, OWORD PTR [rsi+224]
- L_AES_GCM_encrypt_avx2_calc_iv_1_aesenc_avx_last:
- vaesenclast xmm5, xmm5, xmm0
- vpshufb xmm5, xmm5, OWORD PTR L_avx2_aes_gcm_bswap_mask
- ; Calc counter
- ; Initialization vector
- cmp edx, 0
- mov rcx, 0
- je L_AES_GCM_encrypt_avx2_calc_iv_done
- cmp edx, 16
- jl L_AES_GCM_encrypt_avx2_calc_iv_lt16
- and edx, 4294967280
- L_AES_GCM_encrypt_avx2_calc_iv_16_loop:
- vmovdqu xmm0, OWORD PTR [rax+rcx]
- vpshufb xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm4, xmm4, xmm0
- ; ghash_gfmul_avx
- vpclmulqdq xmm2, xmm5, xmm4, 16
- vpclmulqdq xmm1, xmm5, xmm4, 1
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpxor xmm2, xmm2, xmm1
- vpslldq xmm1, xmm2, 8
- vpsrldq xmm2, xmm2, 8
- vpxor xmm7, xmm0, xmm1
- vpxor xmm4, xmm3, xmm2
- ; ghash_mid
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm4, xmm4, xmm1
- ; ghash_red
- vmovdqu xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpclmulqdq xmm0, xmm7, xmm2, 16
- vpshufd xmm1, xmm7, 78
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm0, xmm1, xmm2, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm1, xmm1, xmm0
- vpxor xmm4, xmm4, xmm1
- add ecx, 16
- cmp ecx, edx
- jl L_AES_GCM_encrypt_avx2_calc_iv_16_loop
- mov edx, ebx
- cmp ecx, edx
- je L_AES_GCM_encrypt_avx2_calc_iv_done
- L_AES_GCM_encrypt_avx2_calc_iv_lt16:
- vpxor xmm0, xmm0, xmm0
- xor ebx, ebx
- vmovdqu OWORD PTR [rsp], xmm0
- L_AES_GCM_encrypt_avx2_calc_iv_loop:
- movzx r13d, BYTE PTR [rax+rcx]
- mov BYTE PTR [rsp+rbx], r13b
- inc ecx
- inc ebx
- cmp ecx, edx
- jl L_AES_GCM_encrypt_avx2_calc_iv_loop
- vmovdqu xmm0, OWORD PTR [rsp]
- vpshufb xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm4, xmm4, xmm0
- ; ghash_gfmul_avx
- vpclmulqdq xmm2, xmm5, xmm4, 16
- vpclmulqdq xmm1, xmm5, xmm4, 1
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpxor xmm2, xmm2, xmm1
- vpslldq xmm1, xmm2, 8
- vpsrldq xmm2, xmm2, 8
- vpxor xmm7, xmm0, xmm1
- vpxor xmm4, xmm3, xmm2
- ; ghash_mid
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm4, xmm4, xmm1
- ; ghash_red
- vmovdqu xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpclmulqdq xmm0, xmm7, xmm2, 16
- vpshufd xmm1, xmm7, 78
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm0, xmm1, xmm2, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm1, xmm1, xmm0
- vpxor xmm4, xmm4, xmm1
- L_AES_GCM_encrypt_avx2_calc_iv_done:
- ; T = Encrypt counter
- vpxor xmm0, xmm0, xmm0
- shl edx, 3
- vmovq xmm0, rdx
- vpxor xmm4, xmm4, xmm0
- ; ghash_gfmul_avx
- vpclmulqdq xmm2, xmm5, xmm4, 16
- vpclmulqdq xmm1, xmm5, xmm4, 1
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpxor xmm2, xmm2, xmm1
- vpslldq xmm1, xmm2, 8
- vpsrldq xmm2, xmm2, 8
- vpxor xmm7, xmm0, xmm1
- vpxor xmm4, xmm3, xmm2
- ; ghash_mid
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm4, xmm4, xmm1
- ; ghash_red
- vmovdqu xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpclmulqdq xmm0, xmm7, xmm2, 16
- vpshufd xmm1, xmm7, 78
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm0, xmm1, xmm2, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm1, xmm1, xmm0
- vpxor xmm4, xmm4, xmm1
- vpshufb xmm4, xmm4, OWORD PTR L_avx2_aes_gcm_bswap_mask
- ; Encrypt counter
- vmovdqu xmm15, OWORD PTR [rsi]
- vpxor xmm15, xmm15, xmm4
- vaesenc xmm15, xmm15, [rsi+16]
- vaesenc xmm15, xmm15, [rsi+32]
- vaesenc xmm15, xmm15, [rsi+48]
- vaesenc xmm15, xmm15, [rsi+64]
- vaesenc xmm15, xmm15, [rsi+80]
- vaesenc xmm15, xmm15, [rsi+96]
- vaesenc xmm15, xmm15, [rsi+112]
- vaesenc xmm15, xmm15, [rsi+128]
- vaesenc xmm15, xmm15, [rsi+144]
- cmp r9d, 11
- vmovdqu xmm0, OWORD PTR [rsi+160]
- jl L_AES_GCM_encrypt_avx2_calc_iv_2_aesenc_avx_last
- vaesenc xmm15, xmm15, xmm0
- vaesenc xmm15, xmm15, [rsi+176]
- cmp r9d, 13
- vmovdqu xmm0, OWORD PTR [rsi+192]
- jl L_AES_GCM_encrypt_avx2_calc_iv_2_aesenc_avx_last
- vaesenc xmm15, xmm15, xmm0
- vaesenc xmm15, xmm15, [rsi+208]
- vmovdqu xmm0, OWORD PTR [rsi+224]
- L_AES_GCM_encrypt_avx2_calc_iv_2_aesenc_avx_last:
- vaesenclast xmm15, xmm15, xmm0
- jmp L_AES_GCM_encrypt_avx2_iv_done
- L_AES_GCM_encrypt_avx2_iv_12:
- ; # Calculate values when IV is 12 bytes
- ; Set counter based on IV
- vmovdqu xmm4, OWORD PTR L_avx2_aes_gcm_bswap_one
- vmovdqu xmm5, OWORD PTR [rsi]
- vpblendd xmm4, xmm4, [rax], 7
- ; H = Encrypt X(=0) and T = Encrypt counter
- vmovdqu xmm7, OWORD PTR [rsi+16]
- vpxor xmm15, xmm4, xmm5
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm0, OWORD PTR [rsi+32]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+48]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+64]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+80]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+96]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+112]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+128]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+144]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- cmp r9d, 11
- vmovdqu xmm0, OWORD PTR [rsi+160]
- jl L_AES_GCM_encrypt_avx2_calc_iv_12_last
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+176]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- cmp r9d, 13
- vmovdqu xmm0, OWORD PTR [rsi+192]
- jl L_AES_GCM_encrypt_avx2_calc_iv_12_last
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+208]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+224]
- L_AES_GCM_encrypt_avx2_calc_iv_12_last:
- vaesenclast xmm5, xmm5, xmm0
- vaesenclast xmm15, xmm15, xmm0
- vpshufb xmm5, xmm5, OWORD PTR L_avx2_aes_gcm_bswap_mask
- L_AES_GCM_encrypt_avx2_iv_done:
- ; Additional authentication data
- mov edx, r11d
- cmp edx, 0
- je L_AES_GCM_encrypt_avx2_calc_aad_done
- xor ecx, ecx
- cmp edx, 16
- jl L_AES_GCM_encrypt_avx2_calc_aad_lt16
- and edx, 4294967280
- L_AES_GCM_encrypt_avx2_calc_aad_16_loop:
- vmovdqu xmm0, OWORD PTR [r12+rcx]
- vpshufb xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm6, xmm6, xmm0
- ; ghash_gfmul_avx
- vpclmulqdq xmm2, xmm5, xmm6, 16
- vpclmulqdq xmm1, xmm5, xmm6, 1
- vpclmulqdq xmm0, xmm5, xmm6, 0
- vpclmulqdq xmm3, xmm5, xmm6, 17
- vpxor xmm2, xmm2, xmm1
- vpslldq xmm1, xmm2, 8
- vpsrldq xmm2, xmm2, 8
- vpxor xmm7, xmm0, xmm1
- vpxor xmm6, xmm3, xmm2
- ; ghash_mid
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm6, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm6, xmm6, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm6, xmm6, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm6, xmm6, xmm1
- ; ghash_red
- vmovdqu xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpclmulqdq xmm0, xmm7, xmm2, 16
- vpshufd xmm1, xmm7, 78
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm0, xmm1, xmm2, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm1, xmm1, xmm0
- vpxor xmm6, xmm6, xmm1
- add ecx, 16
- cmp ecx, edx
- jl L_AES_GCM_encrypt_avx2_calc_aad_16_loop
- mov edx, r11d
- cmp ecx, edx
- je L_AES_GCM_encrypt_avx2_calc_aad_done
- L_AES_GCM_encrypt_avx2_calc_aad_lt16:
- vpxor xmm0, xmm0, xmm0
- xor ebx, ebx
- vmovdqu OWORD PTR [rsp], xmm0
- L_AES_GCM_encrypt_avx2_calc_aad_loop:
- movzx r13d, BYTE PTR [r12+rcx]
- mov BYTE PTR [rsp+rbx], r13b
- inc ecx
- inc ebx
- cmp ecx, edx
- jl L_AES_GCM_encrypt_avx2_calc_aad_loop
- vmovdqu xmm0, OWORD PTR [rsp]
- vpshufb xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm6, xmm6, xmm0
- ; ghash_gfmul_avx
- vpclmulqdq xmm2, xmm5, xmm6, 16
- vpclmulqdq xmm1, xmm5, xmm6, 1
- vpclmulqdq xmm0, xmm5, xmm6, 0
- vpclmulqdq xmm3, xmm5, xmm6, 17
- vpxor xmm2, xmm2, xmm1
- vpslldq xmm1, xmm2, 8
- vpsrldq xmm2, xmm2, 8
- vpxor xmm7, xmm0, xmm1
- vpxor xmm6, xmm3, xmm2
- ; ghash_mid
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm6, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm6, xmm6, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm6, xmm6, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm6, xmm6, xmm1
- ; ghash_red
- vmovdqu xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpclmulqdq xmm0, xmm7, xmm2, 16
- vpshufd xmm1, xmm7, 78
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm0, xmm1, xmm2, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm1, xmm1, xmm0
- vpxor xmm6, xmm6, xmm1
- L_AES_GCM_encrypt_avx2_calc_aad_done:
- ; Calculate counter and H
- vpsrlq xmm1, xmm5, 63
- vpsllq xmm0, xmm5, 1
- vpslldq xmm1, xmm1, 8
- vpor xmm0, xmm0, xmm1
- vpshufd xmm5, xmm5, 255
- vpsrad xmm5, xmm5, 31
- vpshufb xmm4, xmm4, OWORD PTR L_avx2_aes_gcm_bswap_epi64
- vpand xmm5, xmm5, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpaddd xmm4, xmm4, OWORD PTR L_avx2_aes_gcm_one
- vpxor xmm5, xmm5, xmm0
- xor ebx, ebx
- cmp r10d, 128
- mov r13d, r10d
- jl L_AES_GCM_encrypt_avx2_done_128
- and r13d, 4294967168
- vmovdqu OWORD PTR [rsp+128], xmm4
- vmovdqu OWORD PTR [rsp+144], xmm15
- vmovdqu xmm3, OWORD PTR L_avx2_aes_gcm_mod2_128
- ; H ^ 1 and H ^ 2
- vpclmulqdq xmm9, xmm5, xmm5, 0
- vpclmulqdq xmm10, xmm5, xmm5, 17
- vpclmulqdq xmm8, xmm9, xmm3, 16
- vpshufd xmm9, xmm9, 78
- vpxor xmm9, xmm9, xmm8
- vpclmulqdq xmm8, xmm9, xmm3, 16
- vpshufd xmm9, xmm9, 78
- vpxor xmm9, xmm9, xmm8
- vpxor xmm0, xmm10, xmm9
- vmovdqu OWORD PTR [rsp], xmm5
- vmovdqu OWORD PTR [rsp+16], xmm0
- ; H ^ 3 and H ^ 4
- vpclmulqdq xmm11, xmm0, xmm5, 16
- vpclmulqdq xmm10, xmm0, xmm5, 1
- vpclmulqdq xmm9, xmm0, xmm5, 0
- vpclmulqdq xmm12, xmm0, xmm5, 17
- vpclmulqdq xmm13, xmm0, xmm0, 0
- vpclmulqdq xmm14, xmm0, xmm0, 17
- vpxor xmm11, xmm11, xmm10
- vpslldq xmm10, xmm11, 8
- vpsrldq xmm11, xmm11, 8
- vpxor xmm10, xmm10, xmm9
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm10, xmm10, xmm9
- vpxor xmm13, xmm13, xmm8
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm12, xmm12, xmm11
- vpxor xmm13, xmm13, xmm8
- vpxor xmm10, xmm10, xmm12
- vpxor xmm2, xmm13, xmm14
- vpxor xmm1, xmm10, xmm9
- vmovdqu OWORD PTR [rsp+32], xmm1
- vmovdqu OWORD PTR [rsp+48], xmm2
- ; H ^ 5 and H ^ 6
- vpclmulqdq xmm11, xmm1, xmm0, 16
- vpclmulqdq xmm10, xmm1, xmm0, 1
- vpclmulqdq xmm9, xmm1, xmm0, 0
- vpclmulqdq xmm12, xmm1, xmm0, 17
- vpclmulqdq xmm13, xmm1, xmm1, 0
- vpclmulqdq xmm14, xmm1, xmm1, 17
- vpxor xmm11, xmm11, xmm10
- vpslldq xmm10, xmm11, 8
- vpsrldq xmm11, xmm11, 8
- vpxor xmm10, xmm10, xmm9
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm10, xmm10, xmm9
- vpxor xmm13, xmm13, xmm8
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm12, xmm12, xmm11
- vpxor xmm13, xmm13, xmm8
- vpxor xmm10, xmm10, xmm12
- vpxor xmm0, xmm13, xmm14
- vpxor xmm7, xmm10, xmm9
- vmovdqu OWORD PTR [rsp+64], xmm7
- vmovdqu OWORD PTR [rsp+80], xmm0
- ; H ^ 7 and H ^ 8
- vpclmulqdq xmm11, xmm2, xmm1, 16
- vpclmulqdq xmm10, xmm2, xmm1, 1
- vpclmulqdq xmm9, xmm2, xmm1, 0
- vpclmulqdq xmm12, xmm2, xmm1, 17
- vpclmulqdq xmm13, xmm2, xmm2, 0
- vpclmulqdq xmm14, xmm2, xmm2, 17
- vpxor xmm11, xmm11, xmm10
- vpslldq xmm10, xmm11, 8
- vpsrldq xmm11, xmm11, 8
- vpxor xmm10, xmm10, xmm9
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm10, xmm10, xmm9
- vpxor xmm13, xmm13, xmm8
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm12, xmm12, xmm11
- vpxor xmm13, xmm13, xmm8
- vpxor xmm10, xmm10, xmm12
- vpxor xmm0, xmm13, xmm14
- vpxor xmm7, xmm10, xmm9
- vmovdqu OWORD PTR [rsp+96], xmm7
- vmovdqu OWORD PTR [rsp+112], xmm0
- ; First 128 bytes of input
- ; aesenc_128
- ; aesenc_ctr
- vmovdqu xmm0, OWORD PTR [rsp+128]
- vmovdqu xmm1, OWORD PTR L_avx2_aes_gcm_bswap_epi64
- vpaddd xmm9, xmm0, OWORD PTR L_avx2_aes_gcm_one
- vpshufb xmm8, xmm0, xmm1
- vpaddd xmm10, xmm0, OWORD PTR L_avx2_aes_gcm_two
- vpshufb xmm9, xmm9, xmm1
- vpaddd xmm11, xmm0, OWORD PTR L_avx2_aes_gcm_three
- vpshufb xmm10, xmm10, xmm1
- vpaddd xmm12, xmm0, OWORD PTR L_avx2_aes_gcm_four
- vpshufb xmm11, xmm11, xmm1
- vpaddd xmm13, xmm0, OWORD PTR L_avx2_aes_gcm_five
- vpshufb xmm12, xmm12, xmm1
- vpaddd xmm14, xmm0, OWORD PTR L_avx2_aes_gcm_six
- vpshufb xmm13, xmm13, xmm1
- vpaddd xmm15, xmm0, OWORD PTR L_avx2_aes_gcm_seven
- vpshufb xmm14, xmm14, xmm1
- vpaddd xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_eight
- vpshufb xmm15, xmm15, xmm1
- ; aesenc_xor
- vmovdqu xmm7, OWORD PTR [rsi]
- vmovdqu OWORD PTR [rsp+128], xmm0
- vpxor xmm8, xmm8, xmm7
- vpxor xmm9, xmm9, xmm7
- vpxor xmm10, xmm10, xmm7
- vpxor xmm11, xmm11, xmm7
- vpxor xmm12, xmm12, xmm7
- vpxor xmm13, xmm13, xmm7
- vpxor xmm14, xmm14, xmm7
- vpxor xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsi+16]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsi+32]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsi+48]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsi+64]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsi+80]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsi+96]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsi+112]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsi+128]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsi+144]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- cmp r9d, 11
- vmovdqu xmm7, OWORD PTR [rsi+160]
- jl L_AES_GCM_encrypt_avx2_aesenc_128_enc_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsi+176]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- cmp r9d, 13
- vmovdqu xmm7, OWORD PTR [rsi+192]
- jl L_AES_GCM_encrypt_avx2_aesenc_128_enc_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsi+208]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsi+224]
- L_AES_GCM_encrypt_avx2_aesenc_128_enc_done:
- ; aesenc_last
- vaesenclast xmm8, xmm8, xmm7
- vaesenclast xmm9, xmm9, xmm7
- vaesenclast xmm10, xmm10, xmm7
- vaesenclast xmm11, xmm11, xmm7
- vmovdqu xmm0, OWORD PTR [rdi]
- vmovdqu xmm1, OWORD PTR [rdi+16]
- vmovdqu xmm2, OWORD PTR [rdi+32]
- vmovdqu xmm3, OWORD PTR [rdi+48]
- vpxor xmm8, xmm8, xmm0
- vpxor xmm9, xmm9, xmm1
- vpxor xmm10, xmm10, xmm2
- vpxor xmm11, xmm11, xmm3
- vmovdqu OWORD PTR [r8], xmm8
- vmovdqu OWORD PTR [r8+16], xmm9
- vmovdqu OWORD PTR [r8+32], xmm10
- vmovdqu OWORD PTR [r8+48], xmm11
- vaesenclast xmm12, xmm12, xmm7
- vaesenclast xmm13, xmm13, xmm7
- vaesenclast xmm14, xmm14, xmm7
- vaesenclast xmm15, xmm15, xmm7
- vmovdqu xmm0, OWORD PTR [rdi+64]
- vmovdqu xmm1, OWORD PTR [rdi+80]
- vmovdqu xmm2, OWORD PTR [rdi+96]
- vmovdqu xmm3, OWORD PTR [rdi+112]
- vpxor xmm12, xmm12, xmm0
- vpxor xmm13, xmm13, xmm1
- vpxor xmm14, xmm14, xmm2
- vpxor xmm15, xmm15, xmm3
- vmovdqu OWORD PTR [r8+64], xmm12
- vmovdqu OWORD PTR [r8+80], xmm13
- vmovdqu OWORD PTR [r8+96], xmm14
- vmovdqu OWORD PTR [r8+112], xmm15
- cmp r13d, 128
- mov ebx, 128
- jle L_AES_GCM_encrypt_avx2_end_128
- ; More 128 bytes of input
- L_AES_GCM_encrypt_avx2_ghash_128:
- ; aesenc_128_ghash
- lea rcx, QWORD PTR [rdi+rbx]
- lea rdx, QWORD PTR [r8+rbx]
- ; aesenc_ctr
- vmovdqu xmm0, OWORD PTR [rsp+128]
- vmovdqu xmm1, OWORD PTR L_avx2_aes_gcm_bswap_epi64
- vpaddd xmm9, xmm0, OWORD PTR L_avx2_aes_gcm_one
- vpshufb xmm8, xmm0, xmm1
- vpaddd xmm10, xmm0, OWORD PTR L_avx2_aes_gcm_two
- vpshufb xmm9, xmm9, xmm1
- vpaddd xmm11, xmm0, OWORD PTR L_avx2_aes_gcm_three
- vpshufb xmm10, xmm10, xmm1
- vpaddd xmm12, xmm0, OWORD PTR L_avx2_aes_gcm_four
- vpshufb xmm11, xmm11, xmm1
- vpaddd xmm13, xmm0, OWORD PTR L_avx2_aes_gcm_five
- vpshufb xmm12, xmm12, xmm1
- vpaddd xmm14, xmm0, OWORD PTR L_avx2_aes_gcm_six
- vpshufb xmm13, xmm13, xmm1
- vpaddd xmm15, xmm0, OWORD PTR L_avx2_aes_gcm_seven
- vpshufb xmm14, xmm14, xmm1
- vpaddd xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_eight
- vpshufb xmm15, xmm15, xmm1
- ; aesenc_xor
- vmovdqu xmm7, OWORD PTR [rsi]
- vmovdqu OWORD PTR [rsp+128], xmm0
- vpxor xmm8, xmm8, xmm7
- vpxor xmm9, xmm9, xmm7
- vpxor xmm10, xmm10, xmm7
- vpxor xmm11, xmm11, xmm7
- vpxor xmm12, xmm12, xmm7
- vpxor xmm13, xmm13, xmm7
- vpxor xmm14, xmm14, xmm7
- vpxor xmm15, xmm15, xmm7
- ; aesenc_pclmul_1
- vmovdqu xmm1, OWORD PTR [rdx+-128]
- vmovdqu xmm0, OWORD PTR [rsi+16]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vmovdqu xmm2, OWORD PTR [rsp+112]
- vpxor xmm1, xmm1, xmm6
- vpclmulqdq xmm5, xmm1, xmm2, 16
- vpclmulqdq xmm3, xmm1, xmm2, 1
- vpclmulqdq xmm6, xmm1, xmm2, 0
- vpclmulqdq xmm7, xmm1, xmm2, 17
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_2
- vmovdqu xmm1, OWORD PTR [rdx+-112]
- vmovdqu xmm0, OWORD PTR [rsp+96]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rsi+32]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rdx+-96]
- vmovdqu xmm0, OWORD PTR [rsp+80]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rsi+48]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rdx+-80]
- vmovdqu xmm0, OWORD PTR [rsp+64]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rsi+64]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rdx+-64]
- vmovdqu xmm0, OWORD PTR [rsp+48]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rsi+80]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rdx+-48]
- vmovdqu xmm0, OWORD PTR [rsp+32]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rsi+96]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rdx+-32]
- vmovdqu xmm0, OWORD PTR [rsp+16]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rsi+112]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rdx+-16]
- vmovdqu xmm0, OWORD PTR [rsp]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rsi+128]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_l
- vpxor xmm5, xmm5, xmm2
- vpxor xmm6, xmm6, xmm4
- vpxor xmm5, xmm5, xmm3
- vpslldq xmm1, xmm5, 8
- vpsrldq xmm5, xmm5, 8
- vmovdqu xmm4, OWORD PTR [rsi+144]
- vmovdqu xmm0, OWORD PTR L_avx2_aes_gcm_mod2_128
- vaesenc xmm8, xmm8, xmm4
- vpxor xmm6, xmm6, xmm1
- vpxor xmm7, xmm7, xmm5
- vpclmulqdq xmm3, xmm6, xmm0, 16
- vaesenc xmm9, xmm9, xmm4
- vaesenc xmm10, xmm10, xmm4
- vaesenc xmm11, xmm11, xmm4
- vpshufd xmm6, xmm6, 78
- vpxor xmm6, xmm6, xmm3
- vpclmulqdq xmm3, xmm6, xmm0, 16
- vaesenc xmm12, xmm12, xmm4
- vaesenc xmm13, xmm13, xmm4
- vaesenc xmm14, xmm14, xmm4
- vpshufd xmm6, xmm6, 78
- vpxor xmm6, xmm6, xmm3
- vpxor xmm6, xmm6, xmm7
- vaesenc xmm15, xmm15, xmm4
- cmp r9d, 11
- vmovdqu xmm7, OWORD PTR [rsi+160]
- jl L_AES_GCM_encrypt_avx2_aesenc_128_ghash_avx_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsi+176]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- cmp r9d, 13
- vmovdqu xmm7, OWORD PTR [rsi+192]
- jl L_AES_GCM_encrypt_avx2_aesenc_128_ghash_avx_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsi+208]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsi+224]
- L_AES_GCM_encrypt_avx2_aesenc_128_ghash_avx_done:
- ; aesenc_last
- vaesenclast xmm8, xmm8, xmm7
- vaesenclast xmm9, xmm9, xmm7
- vaesenclast xmm10, xmm10, xmm7
- vaesenclast xmm11, xmm11, xmm7
- vmovdqu xmm0, OWORD PTR [rcx]
- vmovdqu xmm1, OWORD PTR [rcx+16]
- vmovdqu xmm2, OWORD PTR [rcx+32]
- vmovdqu xmm3, OWORD PTR [rcx+48]
- vpxor xmm8, xmm8, xmm0
- vpxor xmm9, xmm9, xmm1
- vpxor xmm10, xmm10, xmm2
- vpxor xmm11, xmm11, xmm3
- vmovdqu OWORD PTR [rdx], xmm8
- vmovdqu OWORD PTR [rdx+16], xmm9
- vmovdqu OWORD PTR [rdx+32], xmm10
- vmovdqu OWORD PTR [rdx+48], xmm11
- vaesenclast xmm12, xmm12, xmm7
- vaesenclast xmm13, xmm13, xmm7
- vaesenclast xmm14, xmm14, xmm7
- vaesenclast xmm15, xmm15, xmm7
- vmovdqu xmm0, OWORD PTR [rcx+64]
- vmovdqu xmm1, OWORD PTR [rcx+80]
- vmovdqu xmm2, OWORD PTR [rcx+96]
- vmovdqu xmm3, OWORD PTR [rcx+112]
- vpxor xmm12, xmm12, xmm0
- vpxor xmm13, xmm13, xmm1
- vpxor xmm14, xmm14, xmm2
- vpxor xmm15, xmm15, xmm3
- vmovdqu OWORD PTR [rdx+64], xmm12
- vmovdqu OWORD PTR [rdx+80], xmm13
- vmovdqu OWORD PTR [rdx+96], xmm14
- vmovdqu OWORD PTR [rdx+112], xmm15
- ; aesenc_128_ghash - end
- add ebx, 128
- cmp ebx, r13d
- jl L_AES_GCM_encrypt_avx2_ghash_128
- L_AES_GCM_encrypt_avx2_end_128:
- vmovdqu xmm4, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpshufb xmm8, xmm8, xmm4
- vpshufb xmm9, xmm9, xmm4
- vpshufb xmm10, xmm10, xmm4
- vpshufb xmm11, xmm11, xmm4
- vpshufb xmm12, xmm12, xmm4
- vpshufb xmm13, xmm13, xmm4
- vpshufb xmm14, xmm14, xmm4
- vpshufb xmm15, xmm15, xmm4
- vpxor xmm8, xmm8, xmm6
- vmovdqu xmm7, OWORD PTR [rsp]
- vpclmulqdq xmm5, xmm7, xmm15, 16
- vpclmulqdq xmm1, xmm7, xmm15, 1
- vpclmulqdq xmm4, xmm7, xmm15, 0
- vpclmulqdq xmm6, xmm7, xmm15, 17
- vpxor xmm5, xmm5, xmm1
- vmovdqu xmm7, OWORD PTR [rsp+16]
- vpclmulqdq xmm2, xmm7, xmm14, 16
- vpclmulqdq xmm1, xmm7, xmm14, 1
- vpclmulqdq xmm0, xmm7, xmm14, 0
- vpclmulqdq xmm3, xmm7, xmm14, 17
- vpxor xmm2, xmm2, xmm1
- vpxor xmm6, xmm6, xmm3
- vpxor xmm5, xmm5, xmm2
- vpxor xmm4, xmm4, xmm0
- vmovdqu xmm15, OWORD PTR [rsp+32]
- vmovdqu xmm7, OWORD PTR [rsp+48]
- vpclmulqdq xmm2, xmm15, xmm13, 16
- vpclmulqdq xmm1, xmm15, xmm13, 1
- vpclmulqdq xmm0, xmm15, xmm13, 0
- vpclmulqdq xmm3, xmm15, xmm13, 17
- vpxor xmm2, xmm2, xmm1
- vpxor xmm6, xmm6, xmm3
- vpxor xmm5, xmm5, xmm2
- vpxor xmm4, xmm4, xmm0
- vpclmulqdq xmm2, xmm7, xmm12, 16
- vpclmulqdq xmm1, xmm7, xmm12, 1
- vpclmulqdq xmm0, xmm7, xmm12, 0
- vpclmulqdq xmm3, xmm7, xmm12, 17
- vpxor xmm2, xmm2, xmm1
- vpxor xmm6, xmm6, xmm3
- vpxor xmm5, xmm5, xmm2
- vpxor xmm4, xmm4, xmm0
- vmovdqu xmm15, OWORD PTR [rsp+64]
- vmovdqu xmm7, OWORD PTR [rsp+80]
- vpclmulqdq xmm2, xmm15, xmm11, 16
- vpclmulqdq xmm1, xmm15, xmm11, 1
- vpclmulqdq xmm0, xmm15, xmm11, 0
- vpclmulqdq xmm3, xmm15, xmm11, 17
- vpxor xmm2, xmm2, xmm1
- vpxor xmm6, xmm6, xmm3
- vpxor xmm5, xmm5, xmm2
- vpxor xmm4, xmm4, xmm0
- vpclmulqdq xmm2, xmm7, xmm10, 16
- vpclmulqdq xmm1, xmm7, xmm10, 1
- vpclmulqdq xmm0, xmm7, xmm10, 0
- vpclmulqdq xmm3, xmm7, xmm10, 17
- vpxor xmm2, xmm2, xmm1
- vpxor xmm6, xmm6, xmm3
- vpxor xmm5, xmm5, xmm2
- vpxor xmm4, xmm4, xmm0
- vmovdqu xmm15, OWORD PTR [rsp+96]
- vmovdqu xmm7, OWORD PTR [rsp+112]
- vpclmulqdq xmm2, xmm15, xmm9, 16
- vpclmulqdq xmm1, xmm15, xmm9, 1
- vpclmulqdq xmm0, xmm15, xmm9, 0
- vpclmulqdq xmm3, xmm15, xmm9, 17
- vpxor xmm2, xmm2, xmm1
- vpxor xmm6, xmm6, xmm3
- vpxor xmm5, xmm5, xmm2
- vpxor xmm4, xmm4, xmm0
- vpclmulqdq xmm2, xmm7, xmm8, 16
- vpclmulqdq xmm1, xmm7, xmm8, 1
- vpclmulqdq xmm0, xmm7, xmm8, 0
- vpclmulqdq xmm3, xmm7, xmm8, 17
- vpxor xmm2, xmm2, xmm1
- vpxor xmm6, xmm6, xmm3
- vpxor xmm5, xmm5, xmm2
- vpxor xmm4, xmm4, xmm0
- vpslldq xmm7, xmm5, 8
- vpsrldq xmm5, xmm5, 8
- vpxor xmm4, xmm4, xmm7
- vpxor xmm6, xmm6, xmm5
- ; ghash_red
- vmovdqu xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpclmulqdq xmm0, xmm4, xmm2, 16
- vpshufd xmm1, xmm4, 78
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm0, xmm1, xmm2, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm1, xmm1, xmm0
- vpxor xmm6, xmm6, xmm1
- vmovdqu xmm5, OWORD PTR [rsp]
- vmovdqu xmm4, OWORD PTR [rsp+128]
- vmovdqu xmm15, OWORD PTR [rsp+144]
- L_AES_GCM_encrypt_avx2_done_128:
- cmp ebx, r10d
- je L_AES_GCM_encrypt_avx2_done_enc
- mov r13d, r10d
- and r13d, 4294967280
- cmp ebx, r13d
- jge L_AES_GCM_encrypt_avx2_last_block_done
- ; aesenc_block
- vmovdqu xmm1, xmm4
- vpshufb xmm0, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_epi64
- vpaddd xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_one
- vpxor xmm0, xmm0, [rsi]
- vmovdqu xmm2, OWORD PTR [rsi+16]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rsi+32]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rsi+48]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rsi+64]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rsi+80]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rsi+96]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rsi+112]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rsi+128]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rsi+144]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm4, xmm1
- cmp r9d, 11
- vmovdqu xmm1, OWORD PTR [rsi+160]
- jl L_AES_GCM_encrypt_avx2_aesenc_block_last
- vaesenc xmm0, xmm0, xmm1
- vmovdqu xmm2, OWORD PTR [rsi+176]
- vaesenc xmm0, xmm0, xmm2
- cmp r9d, 13
- vmovdqu xmm1, OWORD PTR [rsi+192]
- jl L_AES_GCM_encrypt_avx2_aesenc_block_last
- vaesenc xmm0, xmm0, xmm1
- vmovdqu xmm2, OWORD PTR [rsi+208]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm1, OWORD PTR [rsi+224]
- L_AES_GCM_encrypt_avx2_aesenc_block_last:
- vaesenclast xmm0, xmm0, xmm1
- vmovdqu xmm1, OWORD PTR [rdi+rbx]
- vpxor xmm0, xmm0, xmm1
- vmovdqu OWORD PTR [r8+rbx], xmm0
- vpshufb xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm6, xmm6, xmm0
- add ebx, 16
- cmp ebx, r13d
- jge L_AES_GCM_encrypt_avx2_last_block_ghash
- L_AES_GCM_encrypt_avx2_last_block_start:
- vmovdqu xmm12, OWORD PTR [rdi+rbx]
- vpshufb xmm11, xmm4, OWORD PTR L_avx2_aes_gcm_bswap_epi64
- vpaddd xmm4, xmm4, OWORD PTR L_avx2_aes_gcm_one
- ; aesenc_gfmul_sb
- vpclmulqdq xmm2, xmm6, xmm5, 1
- vpclmulqdq xmm3, xmm6, xmm5, 16
- vpclmulqdq xmm1, xmm6, xmm5, 0
- vpclmulqdq xmm8, xmm6, xmm5, 17
- vpxor xmm11, xmm11, [rsi]
- vaesenc xmm11, xmm11, [rsi+16]
- vpxor xmm3, xmm3, xmm2
- vpslldq xmm2, xmm3, 8
- vpsrldq xmm3, xmm3, 8
- vaesenc xmm11, xmm11, [rsi+32]
- vpxor xmm2, xmm2, xmm1
- vpclmulqdq xmm1, xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vaesenc xmm11, xmm11, [rsi+48]
- vaesenc xmm11, xmm11, [rsi+64]
- vaesenc xmm11, xmm11, [rsi+80]
- vpshufd xmm2, xmm2, 78
- vpxor xmm2, xmm2, xmm1
- vpclmulqdq xmm1, xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vaesenc xmm11, xmm11, [rsi+96]
- vaesenc xmm11, xmm11, [rsi+112]
- vaesenc xmm11, xmm11, [rsi+128]
- vpshufd xmm2, xmm2, 78
- vaesenc xmm11, xmm11, [rsi+144]
- vpxor xmm8, xmm8, xmm3
- vpxor xmm2, xmm2, xmm8
- vmovdqu xmm0, OWORD PTR [rsi+160]
- cmp r9d, 11
- jl L_AES_GCM_encrypt_avx2_aesenc_gfmul_sb_last
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm11, xmm11, [rsi+176]
- vmovdqu xmm0, OWORD PTR [rsi+192]
- cmp r9d, 13
- jl L_AES_GCM_encrypt_avx2_aesenc_gfmul_sb_last
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm11, xmm11, [rsi+208]
- vmovdqu xmm0, OWORD PTR [rsi+224]
- L_AES_GCM_encrypt_avx2_aesenc_gfmul_sb_last:
- vaesenclast xmm11, xmm11, xmm0
- vpxor xmm6, xmm2, xmm1
- vpxor xmm11, xmm11, xmm12
- vmovdqu OWORD PTR [r8+rbx], xmm11
- vpshufb xmm11, xmm11, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm6, xmm6, xmm11
- add ebx, 16
- cmp ebx, r13d
- jl L_AES_GCM_encrypt_avx2_last_block_start
- L_AES_GCM_encrypt_avx2_last_block_ghash:
- ; ghash_gfmul_red
- vpclmulqdq xmm10, xmm6, xmm5, 16
- vpclmulqdq xmm9, xmm6, xmm5, 1
- vpclmulqdq xmm8, xmm6, xmm5, 0
- vpxor xmm10, xmm10, xmm9
- vpslldq xmm9, xmm10, 8
- vpsrldq xmm10, xmm10, 8
- vpxor xmm9, xmm9, xmm8
- vpclmulqdq xmm6, xmm6, xmm5, 17
- vpclmulqdq xmm8, xmm9, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vpshufd xmm9, xmm9, 78
- vpxor xmm9, xmm9, xmm8
- vpclmulqdq xmm8, xmm9, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vpshufd xmm9, xmm9, 78
- vpxor xmm6, xmm6, xmm10
- vpxor xmm6, xmm6, xmm9
- vpxor xmm6, xmm6, xmm8
- L_AES_GCM_encrypt_avx2_last_block_done:
- mov ecx, r10d
- mov edx, r10d
- and ecx, 15
- jz L_AES_GCM_encrypt_avx2_done_enc
- ; aesenc_last15_enc
- vpshufb xmm4, xmm4, OWORD PTR L_avx2_aes_gcm_bswap_epi64
- vpxor xmm4, xmm4, [rsi]
- vaesenc xmm4, xmm4, [rsi+16]
- vaesenc xmm4, xmm4, [rsi+32]
- vaesenc xmm4, xmm4, [rsi+48]
- vaesenc xmm4, xmm4, [rsi+64]
- vaesenc xmm4, xmm4, [rsi+80]
- vaesenc xmm4, xmm4, [rsi+96]
- vaesenc xmm4, xmm4, [rsi+112]
- vaesenc xmm4, xmm4, [rsi+128]
- vaesenc xmm4, xmm4, [rsi+144]
- cmp r9d, 11
- vmovdqu xmm0, OWORD PTR [rsi+160]
- jl L_AES_GCM_encrypt_avx2_aesenc_last15_enc_avx_aesenc_avx_last
- vaesenc xmm4, xmm4, xmm0
- vaesenc xmm4, xmm4, [rsi+176]
- cmp r9d, 13
- vmovdqu xmm0, OWORD PTR [rsi+192]
- jl L_AES_GCM_encrypt_avx2_aesenc_last15_enc_avx_aesenc_avx_last
- vaesenc xmm4, xmm4, xmm0
- vaesenc xmm4, xmm4, [rsi+208]
- vmovdqu xmm0, OWORD PTR [rsi+224]
- L_AES_GCM_encrypt_avx2_aesenc_last15_enc_avx_aesenc_avx_last:
- vaesenclast xmm4, xmm4, xmm0
- xor ecx, ecx
- vpxor xmm0, xmm0, xmm0
- vmovdqu OWORD PTR [rsp], xmm4
- vmovdqu OWORD PTR [rsp+16], xmm0
- L_AES_GCM_encrypt_avx2_aesenc_last15_enc_avx_loop:
- movzx r13d, BYTE PTR [rdi+rbx]
- xor r13b, BYTE PTR [rsp+rcx]
- mov BYTE PTR [rsp+rcx+16], r13b
- mov BYTE PTR [r8+rbx], r13b
- inc ebx
- inc ecx
- cmp ebx, edx
- jl L_AES_GCM_encrypt_avx2_aesenc_last15_enc_avx_loop
- L_AES_GCM_encrypt_avx2_aesenc_last15_enc_avx_finish_enc:
- vmovdqu xmm4, OWORD PTR [rsp+16]
- vpshufb xmm4, xmm4, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm6, xmm6, xmm4
- ; ghash_gfmul_red
- vpclmulqdq xmm2, xmm6, xmm5, 16
- vpclmulqdq xmm1, xmm6, xmm5, 1
- vpclmulqdq xmm0, xmm6, xmm5, 0
- vpxor xmm2, xmm2, xmm1
- vpslldq xmm1, xmm2, 8
- vpsrldq xmm2, xmm2, 8
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm6, xmm6, xmm5, 17
- vpclmulqdq xmm0, xmm1, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm0, xmm1, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm6, xmm6, xmm2
- vpxor xmm6, xmm6, xmm1
- vpxor xmm6, xmm6, xmm0
- L_AES_GCM_encrypt_avx2_done_enc:
- ; calc_tag
- shl r10, 3
- shl r11, 3
- vmovq xmm0, r10
- vmovq xmm1, r11
- vpunpcklqdq xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm6
- ; ghash_gfmul_red
- vpclmulqdq xmm4, xmm0, xmm5, 16
- vpclmulqdq xmm3, xmm0, xmm5, 1
- vpclmulqdq xmm2, xmm0, xmm5, 0
- vpxor xmm4, xmm4, xmm3
- vpslldq xmm3, xmm4, 8
- vpsrldq xmm4, xmm4, 8
- vpxor xmm3, xmm3, xmm2
- vpclmulqdq xmm0, xmm0, xmm5, 17
- vpclmulqdq xmm2, xmm3, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vpshufd xmm3, xmm3, 78
- vpxor xmm3, xmm3, xmm2
- vpclmulqdq xmm2, xmm3, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vpshufd xmm3, xmm3, 78
- vpxor xmm0, xmm0, xmm4
- vpxor xmm0, xmm0, xmm3
- vpxor xmm0, xmm0, xmm2
- vpshufb xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm0, xmm0, xmm15
- ; store_tag
- cmp r14d, 16
- je L_AES_GCM_encrypt_avx2_store_tag_16
- xor rcx, rcx
- vmovdqu OWORD PTR [rsp], xmm0
- L_AES_GCM_encrypt_avx2_store_tag_loop:
- movzx r13d, BYTE PTR [rsp+rcx]
- mov BYTE PTR [r15+rcx], r13b
- inc ecx
- cmp ecx, r14d
- jne L_AES_GCM_encrypt_avx2_store_tag_loop
- jmp L_AES_GCM_encrypt_avx2_store_tag_done
- L_AES_GCM_encrypt_avx2_store_tag_16:
- vmovdqu OWORD PTR [r15], xmm0
- L_AES_GCM_encrypt_avx2_store_tag_done:
- vzeroupper
- vmovdqu xmm6, OWORD PTR [rsp+160]
- vmovdqu xmm7, OWORD PTR [rsp+176]
- vmovdqu xmm8, OWORD PTR [rsp+192]
- vmovdqu xmm9, OWORD PTR [rsp+208]
- vmovdqu xmm10, OWORD PTR [rsp+224]
- vmovdqu xmm11, OWORD PTR [rsp+240]
- vmovdqu xmm12, OWORD PTR [rsp+256]
- vmovdqu xmm13, OWORD PTR [rsp+272]
- vmovdqu xmm14, OWORD PTR [rsp+288]
- vmovdqu xmm15, OWORD PTR [rsp+304]
- add rsp, 320
- pop rsi
- pop r14
- pop rbx
- pop r15
- pop r12
- pop rdi
- pop r13
- ret
- AES_GCM_encrypt_avx2 ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_decrypt_avx2 PROC
- push r13
- push rdi
- push r12
- push r14
- push rbx
- push r15
- push rsi
- push rbp
- mov rdi, rcx
- mov r12, r8
- mov rax, r9
- mov r14, QWORD PTR [rsp+104]
- mov r8, rdx
- mov r10d, DWORD PTR [rsp+112]
- mov r11d, DWORD PTR [rsp+120]
- mov ebx, DWORD PTR [rsp+128]
- mov r15d, DWORD PTR [rsp+136]
- mov rsi, QWORD PTR [rsp+144]
- mov r9d, DWORD PTR [rsp+152]
- mov rbp, QWORD PTR [rsp+160]
- sub rsp, 328
- vmovdqu OWORD PTR [rsp+168], xmm6
- vmovdqu OWORD PTR [rsp+184], xmm7
- vmovdqu OWORD PTR [rsp+200], xmm8
- vmovdqu OWORD PTR [rsp+216], xmm9
- vmovdqu OWORD PTR [rsp+232], xmm10
- vmovdqu OWORD PTR [rsp+248], xmm11
- vmovdqu OWORD PTR [rsp+264], xmm12
- vmovdqu OWORD PTR [rsp+280], xmm13
- vmovdqu OWORD PTR [rsp+296], xmm14
- vmovdqu OWORD PTR [rsp+312], xmm15
- vpxor xmm4, xmm4, xmm4
- vpxor xmm6, xmm6, xmm6
- mov edx, ebx
- cmp edx, 12
- je L_AES_GCM_decrypt_avx2_iv_12
- ; Calculate values when IV is not 12 bytes
- ; H = Encrypt X(=0)
- vmovdqu xmm5, OWORD PTR [rsi]
- vaesenc xmm5, xmm5, [rsi+16]
- vaesenc xmm5, xmm5, [rsi+32]
- vaesenc xmm5, xmm5, [rsi+48]
- vaesenc xmm5, xmm5, [rsi+64]
- vaesenc xmm5, xmm5, [rsi+80]
- vaesenc xmm5, xmm5, [rsi+96]
- vaesenc xmm5, xmm5, [rsi+112]
- vaesenc xmm5, xmm5, [rsi+128]
- vaesenc xmm5, xmm5, [rsi+144]
- cmp r9d, 11
- vmovdqu xmm0, OWORD PTR [rsi+160]
- jl L_AES_GCM_decrypt_avx2_calc_iv_1_aesenc_avx_last
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm5, xmm5, [rsi+176]
- cmp r9d, 13
- vmovdqu xmm0, OWORD PTR [rsi+192]
- jl L_AES_GCM_decrypt_avx2_calc_iv_1_aesenc_avx_last
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm5, xmm5, [rsi+208]
- vmovdqu xmm0, OWORD PTR [rsi+224]
- L_AES_GCM_decrypt_avx2_calc_iv_1_aesenc_avx_last:
- vaesenclast xmm5, xmm5, xmm0
- vpshufb xmm5, xmm5, OWORD PTR L_avx2_aes_gcm_bswap_mask
- ; Calc counter
- ; Initialization vector
- cmp edx, 0
- mov rcx, 0
- je L_AES_GCM_decrypt_avx2_calc_iv_done
- cmp edx, 16
- jl L_AES_GCM_decrypt_avx2_calc_iv_lt16
- and edx, 4294967280
- L_AES_GCM_decrypt_avx2_calc_iv_16_loop:
- vmovdqu xmm0, OWORD PTR [rax+rcx]
- vpshufb xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm4, xmm4, xmm0
- ; ghash_gfmul_avx
- vpclmulqdq xmm2, xmm5, xmm4, 16
- vpclmulqdq xmm1, xmm5, xmm4, 1
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpxor xmm2, xmm2, xmm1
- vpslldq xmm1, xmm2, 8
- vpsrldq xmm2, xmm2, 8
- vpxor xmm7, xmm0, xmm1
- vpxor xmm4, xmm3, xmm2
- ; ghash_mid
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm4, xmm4, xmm1
- ; ghash_red
- vmovdqu xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpclmulqdq xmm0, xmm7, xmm2, 16
- vpshufd xmm1, xmm7, 78
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm0, xmm1, xmm2, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm1, xmm1, xmm0
- vpxor xmm4, xmm4, xmm1
- add ecx, 16
- cmp ecx, edx
- jl L_AES_GCM_decrypt_avx2_calc_iv_16_loop
- mov edx, ebx
- cmp ecx, edx
- je L_AES_GCM_decrypt_avx2_calc_iv_done
- L_AES_GCM_decrypt_avx2_calc_iv_lt16:
- vpxor xmm0, xmm0, xmm0
- xor ebx, ebx
- vmovdqu OWORD PTR [rsp], xmm0
- L_AES_GCM_decrypt_avx2_calc_iv_loop:
- movzx r13d, BYTE PTR [rax+rcx]
- mov BYTE PTR [rsp+rbx], r13b
- inc ecx
- inc ebx
- cmp ecx, edx
- jl L_AES_GCM_decrypt_avx2_calc_iv_loop
- vmovdqu xmm0, OWORD PTR [rsp]
- vpshufb xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm4, xmm4, xmm0
- ; ghash_gfmul_avx
- vpclmulqdq xmm2, xmm5, xmm4, 16
- vpclmulqdq xmm1, xmm5, xmm4, 1
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpxor xmm2, xmm2, xmm1
- vpslldq xmm1, xmm2, 8
- vpsrldq xmm2, xmm2, 8
- vpxor xmm7, xmm0, xmm1
- vpxor xmm4, xmm3, xmm2
- ; ghash_mid
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm4, xmm4, xmm1
- ; ghash_red
- vmovdqu xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpclmulqdq xmm0, xmm7, xmm2, 16
- vpshufd xmm1, xmm7, 78
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm0, xmm1, xmm2, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm1, xmm1, xmm0
- vpxor xmm4, xmm4, xmm1
- L_AES_GCM_decrypt_avx2_calc_iv_done:
- ; T = Encrypt counter
- vpxor xmm0, xmm0, xmm0
- shl edx, 3
- vmovq xmm0, rdx
- vpxor xmm4, xmm4, xmm0
- ; ghash_gfmul_avx
- vpclmulqdq xmm2, xmm5, xmm4, 16
- vpclmulqdq xmm1, xmm5, xmm4, 1
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpxor xmm2, xmm2, xmm1
- vpslldq xmm1, xmm2, 8
- vpsrldq xmm2, xmm2, 8
- vpxor xmm7, xmm0, xmm1
- vpxor xmm4, xmm3, xmm2
- ; ghash_mid
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm4, xmm4, xmm1
- ; ghash_red
- vmovdqu xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpclmulqdq xmm0, xmm7, xmm2, 16
- vpshufd xmm1, xmm7, 78
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm0, xmm1, xmm2, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm1, xmm1, xmm0
- vpxor xmm4, xmm4, xmm1
- vpshufb xmm4, xmm4, OWORD PTR L_avx2_aes_gcm_bswap_mask
- ; Encrypt counter
- vmovdqu xmm15, OWORD PTR [rsi]
- vpxor xmm15, xmm15, xmm4
- vaesenc xmm15, xmm15, [rsi+16]
- vaesenc xmm15, xmm15, [rsi+32]
- vaesenc xmm15, xmm15, [rsi+48]
- vaesenc xmm15, xmm15, [rsi+64]
- vaesenc xmm15, xmm15, [rsi+80]
- vaesenc xmm15, xmm15, [rsi+96]
- vaesenc xmm15, xmm15, [rsi+112]
- vaesenc xmm15, xmm15, [rsi+128]
- vaesenc xmm15, xmm15, [rsi+144]
- cmp r9d, 11
- vmovdqu xmm0, OWORD PTR [rsi+160]
- jl L_AES_GCM_decrypt_avx2_calc_iv_2_aesenc_avx_last
- vaesenc xmm15, xmm15, xmm0
- vaesenc xmm15, xmm15, [rsi+176]
- cmp r9d, 13
- vmovdqu xmm0, OWORD PTR [rsi+192]
- jl L_AES_GCM_decrypt_avx2_calc_iv_2_aesenc_avx_last
- vaesenc xmm15, xmm15, xmm0
- vaesenc xmm15, xmm15, [rsi+208]
- vmovdqu xmm0, OWORD PTR [rsi+224]
- L_AES_GCM_decrypt_avx2_calc_iv_2_aesenc_avx_last:
- vaesenclast xmm15, xmm15, xmm0
- jmp L_AES_GCM_decrypt_avx2_iv_done
- L_AES_GCM_decrypt_avx2_iv_12:
- ; # Calculate values when IV is 12 bytes
- ; Set counter based on IV
- vmovdqu xmm4, OWORD PTR L_avx2_aes_gcm_bswap_one
- vmovdqu xmm5, OWORD PTR [rsi]
- vpblendd xmm4, xmm4, [rax], 7
- ; H = Encrypt X(=0) and T = Encrypt counter
- vmovdqu xmm7, OWORD PTR [rsi+16]
- vpxor xmm15, xmm4, xmm5
- vaesenc xmm5, xmm5, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm0, OWORD PTR [rsi+32]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+48]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+64]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+80]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+96]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+112]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+128]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+144]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- cmp r9d, 11
- vmovdqu xmm0, OWORD PTR [rsi+160]
- jl L_AES_GCM_decrypt_avx2_calc_iv_12_last
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+176]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- cmp r9d, 13
- vmovdqu xmm0, OWORD PTR [rsi+192]
- jl L_AES_GCM_decrypt_avx2_calc_iv_12_last
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+208]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm15, xmm15, xmm0
- vmovdqu xmm0, OWORD PTR [rsi+224]
- L_AES_GCM_decrypt_avx2_calc_iv_12_last:
- vaesenclast xmm5, xmm5, xmm0
- vaesenclast xmm15, xmm15, xmm0
- vpshufb xmm5, xmm5, OWORD PTR L_avx2_aes_gcm_bswap_mask
- L_AES_GCM_decrypt_avx2_iv_done:
- ; Additional authentication data
- mov edx, r11d
- cmp edx, 0
- je L_AES_GCM_decrypt_avx2_calc_aad_done
- xor ecx, ecx
- cmp edx, 16
- jl L_AES_GCM_decrypt_avx2_calc_aad_lt16
- and edx, 4294967280
- L_AES_GCM_decrypt_avx2_calc_aad_16_loop:
- vmovdqu xmm0, OWORD PTR [r12+rcx]
- vpshufb xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm6, xmm6, xmm0
- ; ghash_gfmul_avx
- vpclmulqdq xmm2, xmm5, xmm6, 16
- vpclmulqdq xmm1, xmm5, xmm6, 1
- vpclmulqdq xmm0, xmm5, xmm6, 0
- vpclmulqdq xmm3, xmm5, xmm6, 17
- vpxor xmm2, xmm2, xmm1
- vpslldq xmm1, xmm2, 8
- vpsrldq xmm2, xmm2, 8
- vpxor xmm7, xmm0, xmm1
- vpxor xmm6, xmm3, xmm2
- ; ghash_mid
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm6, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm6, xmm6, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm6, xmm6, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm6, xmm6, xmm1
- ; ghash_red
- vmovdqu xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpclmulqdq xmm0, xmm7, xmm2, 16
- vpshufd xmm1, xmm7, 78
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm0, xmm1, xmm2, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm1, xmm1, xmm0
- vpxor xmm6, xmm6, xmm1
- add ecx, 16
- cmp ecx, edx
- jl L_AES_GCM_decrypt_avx2_calc_aad_16_loop
- mov edx, r11d
- cmp ecx, edx
- je L_AES_GCM_decrypt_avx2_calc_aad_done
- L_AES_GCM_decrypt_avx2_calc_aad_lt16:
- vpxor xmm0, xmm0, xmm0
- xor ebx, ebx
- vmovdqu OWORD PTR [rsp], xmm0
- L_AES_GCM_decrypt_avx2_calc_aad_loop:
- movzx r13d, BYTE PTR [r12+rcx]
- mov BYTE PTR [rsp+rbx], r13b
- inc ecx
- inc ebx
- cmp ecx, edx
- jl L_AES_GCM_decrypt_avx2_calc_aad_loop
- vmovdqu xmm0, OWORD PTR [rsp]
- vpshufb xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm6, xmm6, xmm0
- ; ghash_gfmul_avx
- vpclmulqdq xmm2, xmm5, xmm6, 16
- vpclmulqdq xmm1, xmm5, xmm6, 1
- vpclmulqdq xmm0, xmm5, xmm6, 0
- vpclmulqdq xmm3, xmm5, xmm6, 17
- vpxor xmm2, xmm2, xmm1
- vpslldq xmm1, xmm2, 8
- vpsrldq xmm2, xmm2, 8
- vpxor xmm7, xmm0, xmm1
- vpxor xmm6, xmm3, xmm2
- ; ghash_mid
- vpsrld xmm0, xmm7, 31
- vpsrld xmm1, xmm6, 31
- vpslld xmm7, xmm7, 1
- vpslld xmm6, xmm6, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm6, xmm6, xmm2
- vpor xmm7, xmm7, xmm0
- vpor xmm6, xmm6, xmm1
- ; ghash_red
- vmovdqu xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpclmulqdq xmm0, xmm7, xmm2, 16
- vpshufd xmm1, xmm7, 78
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm0, xmm1, xmm2, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm1, xmm1, xmm0
- vpxor xmm6, xmm6, xmm1
- L_AES_GCM_decrypt_avx2_calc_aad_done:
- ; Calculate counter and H
- vpsrlq xmm1, xmm5, 63
- vpsllq xmm0, xmm5, 1
- vpslldq xmm1, xmm1, 8
- vpor xmm0, xmm0, xmm1
- vpshufd xmm5, xmm5, 255
- vpsrad xmm5, xmm5, 31
- vpshufb xmm4, xmm4, OWORD PTR L_avx2_aes_gcm_bswap_epi64
- vpand xmm5, xmm5, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpaddd xmm4, xmm4, OWORD PTR L_avx2_aes_gcm_one
- vpxor xmm5, xmm5, xmm0
- xor ebx, ebx
- cmp r10d, 128
- mov r13d, r10d
- jl L_AES_GCM_decrypt_avx2_done_128
- and r13d, 4294967168
- vmovdqu OWORD PTR [rsp+128], xmm4
- vmovdqu OWORD PTR [rsp+144], xmm15
- vmovdqu xmm3, OWORD PTR L_avx2_aes_gcm_mod2_128
- ; H ^ 1 and H ^ 2
- vpclmulqdq xmm9, xmm5, xmm5, 0
- vpclmulqdq xmm10, xmm5, xmm5, 17
- vpclmulqdq xmm8, xmm9, xmm3, 16
- vpshufd xmm9, xmm9, 78
- vpxor xmm9, xmm9, xmm8
- vpclmulqdq xmm8, xmm9, xmm3, 16
- vpshufd xmm9, xmm9, 78
- vpxor xmm9, xmm9, xmm8
- vpxor xmm0, xmm10, xmm9
- vmovdqu OWORD PTR [rsp], xmm5
- vmovdqu OWORD PTR [rsp+16], xmm0
- ; H ^ 3 and H ^ 4
- vpclmulqdq xmm11, xmm0, xmm5, 16
- vpclmulqdq xmm10, xmm0, xmm5, 1
- vpclmulqdq xmm9, xmm0, xmm5, 0
- vpclmulqdq xmm12, xmm0, xmm5, 17
- vpclmulqdq xmm13, xmm0, xmm0, 0
- vpclmulqdq xmm14, xmm0, xmm0, 17
- vpxor xmm11, xmm11, xmm10
- vpslldq xmm10, xmm11, 8
- vpsrldq xmm11, xmm11, 8
- vpxor xmm10, xmm10, xmm9
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm10, xmm10, xmm9
- vpxor xmm13, xmm13, xmm8
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm12, xmm12, xmm11
- vpxor xmm13, xmm13, xmm8
- vpxor xmm10, xmm10, xmm12
- vpxor xmm2, xmm13, xmm14
- vpxor xmm1, xmm10, xmm9
- vmovdqu OWORD PTR [rsp+32], xmm1
- vmovdqu OWORD PTR [rsp+48], xmm2
- ; H ^ 5 and H ^ 6
- vpclmulqdq xmm11, xmm1, xmm0, 16
- vpclmulqdq xmm10, xmm1, xmm0, 1
- vpclmulqdq xmm9, xmm1, xmm0, 0
- vpclmulqdq xmm12, xmm1, xmm0, 17
- vpclmulqdq xmm13, xmm1, xmm1, 0
- vpclmulqdq xmm14, xmm1, xmm1, 17
- vpxor xmm11, xmm11, xmm10
- vpslldq xmm10, xmm11, 8
- vpsrldq xmm11, xmm11, 8
- vpxor xmm10, xmm10, xmm9
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm10, xmm10, xmm9
- vpxor xmm13, xmm13, xmm8
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm12, xmm12, xmm11
- vpxor xmm13, xmm13, xmm8
- vpxor xmm10, xmm10, xmm12
- vpxor xmm0, xmm13, xmm14
- vpxor xmm7, xmm10, xmm9
- vmovdqu OWORD PTR [rsp+64], xmm7
- vmovdqu OWORD PTR [rsp+80], xmm0
- ; H ^ 7 and H ^ 8
- vpclmulqdq xmm11, xmm2, xmm1, 16
- vpclmulqdq xmm10, xmm2, xmm1, 1
- vpclmulqdq xmm9, xmm2, xmm1, 0
- vpclmulqdq xmm12, xmm2, xmm1, 17
- vpclmulqdq xmm13, xmm2, xmm2, 0
- vpclmulqdq xmm14, xmm2, xmm2, 17
- vpxor xmm11, xmm11, xmm10
- vpslldq xmm10, xmm11, 8
- vpsrldq xmm11, xmm11, 8
- vpxor xmm10, xmm10, xmm9
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm10, xmm10, xmm9
- vpxor xmm13, xmm13, xmm8
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm12, xmm12, xmm11
- vpxor xmm13, xmm13, xmm8
- vpxor xmm10, xmm10, xmm12
- vpxor xmm0, xmm13, xmm14
- vpxor xmm7, xmm10, xmm9
- vmovdqu OWORD PTR [rsp+96], xmm7
- vmovdqu OWORD PTR [rsp+112], xmm0
- L_AES_GCM_decrypt_avx2_ghash_128:
- ; aesenc_128_ghash
- lea rcx, QWORD PTR [rdi+rbx]
- lea rdx, QWORD PTR [r8+rbx]
- ; aesenc_ctr
- vmovdqu xmm0, OWORD PTR [rsp+128]
- vmovdqu xmm1, OWORD PTR L_avx2_aes_gcm_bswap_epi64
- vpaddd xmm9, xmm0, OWORD PTR L_avx2_aes_gcm_one
- vpshufb xmm8, xmm0, xmm1
- vpaddd xmm10, xmm0, OWORD PTR L_avx2_aes_gcm_two
- vpshufb xmm9, xmm9, xmm1
- vpaddd xmm11, xmm0, OWORD PTR L_avx2_aes_gcm_three
- vpshufb xmm10, xmm10, xmm1
- vpaddd xmm12, xmm0, OWORD PTR L_avx2_aes_gcm_four
- vpshufb xmm11, xmm11, xmm1
- vpaddd xmm13, xmm0, OWORD PTR L_avx2_aes_gcm_five
- vpshufb xmm12, xmm12, xmm1
- vpaddd xmm14, xmm0, OWORD PTR L_avx2_aes_gcm_six
- vpshufb xmm13, xmm13, xmm1
- vpaddd xmm15, xmm0, OWORD PTR L_avx2_aes_gcm_seven
- vpshufb xmm14, xmm14, xmm1
- vpaddd xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_eight
- vpshufb xmm15, xmm15, xmm1
- ; aesenc_xor
- vmovdqu xmm7, OWORD PTR [rsi]
- vmovdqu OWORD PTR [rsp+128], xmm0
- vpxor xmm8, xmm8, xmm7
- vpxor xmm9, xmm9, xmm7
- vpxor xmm10, xmm10, xmm7
- vpxor xmm11, xmm11, xmm7
- vpxor xmm12, xmm12, xmm7
- vpxor xmm13, xmm13, xmm7
- vpxor xmm14, xmm14, xmm7
- vpxor xmm15, xmm15, xmm7
- ; aesenc_pclmul_1
- vmovdqu xmm1, OWORD PTR [rcx]
- vmovdqu xmm0, OWORD PTR [rsi+16]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vmovdqu xmm2, OWORD PTR [rsp+112]
- vpxor xmm1, xmm1, xmm6
- vpclmulqdq xmm5, xmm1, xmm2, 16
- vpclmulqdq xmm3, xmm1, xmm2, 1
- vpclmulqdq xmm6, xmm1, xmm2, 0
- vpclmulqdq xmm7, xmm1, xmm2, 17
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_2
- vmovdqu xmm1, OWORD PTR [rcx+16]
- vmovdqu xmm0, OWORD PTR [rsp+96]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rsi+32]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rcx+32]
- vmovdqu xmm0, OWORD PTR [rsp+80]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rsi+48]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rcx+48]
- vmovdqu xmm0, OWORD PTR [rsp+64]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rsi+64]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rcx+64]
- vmovdqu xmm0, OWORD PTR [rsp+48]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rsi+80]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rcx+80]
- vmovdqu xmm0, OWORD PTR [rsp+32]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rsi+96]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rcx+96]
- vmovdqu xmm0, OWORD PTR [rsp+16]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rsi+112]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rcx+112]
- vmovdqu xmm0, OWORD PTR [rsp]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rsi+128]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_l
- vpxor xmm5, xmm5, xmm2
- vpxor xmm6, xmm6, xmm4
- vpxor xmm5, xmm5, xmm3
- vpslldq xmm1, xmm5, 8
- vpsrldq xmm5, xmm5, 8
- vmovdqu xmm4, OWORD PTR [rsi+144]
- vmovdqu xmm0, OWORD PTR L_avx2_aes_gcm_mod2_128
- vaesenc xmm8, xmm8, xmm4
- vpxor xmm6, xmm6, xmm1
- vpxor xmm7, xmm7, xmm5
- vpclmulqdq xmm3, xmm6, xmm0, 16
- vaesenc xmm9, xmm9, xmm4
- vaesenc xmm10, xmm10, xmm4
- vaesenc xmm11, xmm11, xmm4
- vpshufd xmm6, xmm6, 78
- vpxor xmm6, xmm6, xmm3
- vpclmulqdq xmm3, xmm6, xmm0, 16
- vaesenc xmm12, xmm12, xmm4
- vaesenc xmm13, xmm13, xmm4
- vaesenc xmm14, xmm14, xmm4
- vpshufd xmm6, xmm6, 78
- vpxor xmm6, xmm6, xmm3
- vpxor xmm6, xmm6, xmm7
- vaesenc xmm15, xmm15, xmm4
- cmp r9d, 11
- vmovdqu xmm7, OWORD PTR [rsi+160]
- jl L_AES_GCM_decrypt_avx2_aesenc_128_ghash_avx_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsi+176]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- cmp r9d, 13
- vmovdqu xmm7, OWORD PTR [rsi+192]
- jl L_AES_GCM_decrypt_avx2_aesenc_128_ghash_avx_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsi+208]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rsi+224]
- L_AES_GCM_decrypt_avx2_aesenc_128_ghash_avx_done:
- ; aesenc_last
- vaesenclast xmm8, xmm8, xmm7
- vaesenclast xmm9, xmm9, xmm7
- vaesenclast xmm10, xmm10, xmm7
- vaesenclast xmm11, xmm11, xmm7
- vmovdqu xmm0, OWORD PTR [rcx]
- vmovdqu xmm1, OWORD PTR [rcx+16]
- vmovdqu xmm2, OWORD PTR [rcx+32]
- vmovdqu xmm3, OWORD PTR [rcx+48]
- vpxor xmm8, xmm8, xmm0
- vpxor xmm9, xmm9, xmm1
- vpxor xmm10, xmm10, xmm2
- vpxor xmm11, xmm11, xmm3
- vmovdqu OWORD PTR [rdx], xmm8
- vmovdqu OWORD PTR [rdx+16], xmm9
- vmovdqu OWORD PTR [rdx+32], xmm10
- vmovdqu OWORD PTR [rdx+48], xmm11
- vaesenclast xmm12, xmm12, xmm7
- vaesenclast xmm13, xmm13, xmm7
- vaesenclast xmm14, xmm14, xmm7
- vaesenclast xmm15, xmm15, xmm7
- vmovdqu xmm0, OWORD PTR [rcx+64]
- vmovdqu xmm1, OWORD PTR [rcx+80]
- vmovdqu xmm2, OWORD PTR [rcx+96]
- vmovdqu xmm3, OWORD PTR [rcx+112]
- vpxor xmm12, xmm12, xmm0
- vpxor xmm13, xmm13, xmm1
- vpxor xmm14, xmm14, xmm2
- vpxor xmm15, xmm15, xmm3
- vmovdqu OWORD PTR [rdx+64], xmm12
- vmovdqu OWORD PTR [rdx+80], xmm13
- vmovdqu OWORD PTR [rdx+96], xmm14
- vmovdqu OWORD PTR [rdx+112], xmm15
- ; aesenc_128_ghash - end
- add ebx, 128
- cmp ebx, r13d
- jl L_AES_GCM_decrypt_avx2_ghash_128
- vmovdqu xmm5, OWORD PTR [rsp]
- vmovdqu xmm4, OWORD PTR [rsp+128]
- vmovdqu xmm15, OWORD PTR [rsp+144]
- L_AES_GCM_decrypt_avx2_done_128:
- cmp ebx, r10d
- jge L_AES_GCM_decrypt_avx2_done_dec
- mov r13d, r10d
- and r13d, 4294967280
- cmp ebx, r13d
- jge L_AES_GCM_decrypt_avx2_last_block_done
- L_AES_GCM_decrypt_avx2_last_block_start:
- vmovdqu xmm11, OWORD PTR [rdi+rbx]
- vpshufb xmm10, xmm4, OWORD PTR L_avx2_aes_gcm_bswap_epi64
- vpshufb xmm12, xmm11, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpaddd xmm4, xmm4, OWORD PTR L_avx2_aes_gcm_one
- vpxor xmm12, xmm12, xmm6
- ; aesenc_gfmul_sb
- vpclmulqdq xmm2, xmm12, xmm5, 1
- vpclmulqdq xmm3, xmm12, xmm5, 16
- vpclmulqdq xmm1, xmm12, xmm5, 0
- vpclmulqdq xmm8, xmm12, xmm5, 17
- vpxor xmm10, xmm10, [rsi]
- vaesenc xmm10, xmm10, [rsi+16]
- vpxor xmm3, xmm3, xmm2
- vpslldq xmm2, xmm3, 8
- vpsrldq xmm3, xmm3, 8
- vaesenc xmm10, xmm10, [rsi+32]
- vpxor xmm2, xmm2, xmm1
- vpclmulqdq xmm1, xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vaesenc xmm10, xmm10, [rsi+48]
- vaesenc xmm10, xmm10, [rsi+64]
- vaesenc xmm10, xmm10, [rsi+80]
- vpshufd xmm2, xmm2, 78
- vpxor xmm2, xmm2, xmm1
- vpclmulqdq xmm1, xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vaesenc xmm10, xmm10, [rsi+96]
- vaesenc xmm10, xmm10, [rsi+112]
- vaesenc xmm10, xmm10, [rsi+128]
- vpshufd xmm2, xmm2, 78
- vaesenc xmm10, xmm10, [rsi+144]
- vpxor xmm8, xmm8, xmm3
- vpxor xmm2, xmm2, xmm8
- vmovdqu xmm0, OWORD PTR [rsi+160]
- cmp r9d, 11
- jl L_AES_GCM_decrypt_avx2_aesenc_gfmul_sb_last
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm10, xmm10, [rsi+176]
- vmovdqu xmm0, OWORD PTR [rsi+192]
- cmp r9d, 13
- jl L_AES_GCM_decrypt_avx2_aesenc_gfmul_sb_last
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm10, xmm10, [rsi+208]
- vmovdqu xmm0, OWORD PTR [rsi+224]
- L_AES_GCM_decrypt_avx2_aesenc_gfmul_sb_last:
- vaesenclast xmm10, xmm10, xmm0
- vpxor xmm6, xmm2, xmm1
- vpxor xmm10, xmm10, xmm11
- vmovdqu OWORD PTR [r8+rbx], xmm10
- add ebx, 16
- cmp ebx, r13d
- jl L_AES_GCM_decrypt_avx2_last_block_start
- L_AES_GCM_decrypt_avx2_last_block_done:
- mov ecx, r10d
- mov edx, r10d
- and ecx, 15
- jz L_AES_GCM_decrypt_avx2_done_dec
- ; aesenc_last15_dec
- vpshufb xmm4, xmm4, OWORD PTR L_avx2_aes_gcm_bswap_epi64
- vpxor xmm4, xmm4, [rsi]
- vaesenc xmm4, xmm4, [rsi+16]
- vaesenc xmm4, xmm4, [rsi+32]
- vaesenc xmm4, xmm4, [rsi+48]
- vaesenc xmm4, xmm4, [rsi+64]
- vaesenc xmm4, xmm4, [rsi+80]
- vaesenc xmm4, xmm4, [rsi+96]
- vaesenc xmm4, xmm4, [rsi+112]
- vaesenc xmm4, xmm4, [rsi+128]
- vaesenc xmm4, xmm4, [rsi+144]
- cmp r9d, 11
- vmovdqu xmm1, OWORD PTR [rsi+160]
- jl L_AES_GCM_decrypt_avx2_aesenc_last15_dec_avx_aesenc_avx_last
- vaesenc xmm4, xmm4, xmm1
- vaesenc xmm4, xmm4, [rsi+176]
- cmp r9d, 13
- vmovdqu xmm1, OWORD PTR [rsi+192]
- jl L_AES_GCM_decrypt_avx2_aesenc_last15_dec_avx_aesenc_avx_last
- vaesenc xmm4, xmm4, xmm1
- vaesenc xmm4, xmm4, [rsi+208]
- vmovdqu xmm1, OWORD PTR [rsi+224]
- L_AES_GCM_decrypt_avx2_aesenc_last15_dec_avx_aesenc_avx_last:
- vaesenclast xmm4, xmm4, xmm1
- xor ecx, ecx
- vpxor xmm0, xmm0, xmm0
- vmovdqu OWORD PTR [rsp], xmm4
- vmovdqu OWORD PTR [rsp+16], xmm0
- L_AES_GCM_decrypt_avx2_aesenc_last15_dec_avx_loop:
- movzx r13d, BYTE PTR [rdi+rbx]
- mov BYTE PTR [rsp+rcx+16], r13b
- xor r13b, BYTE PTR [rsp+rcx]
- mov BYTE PTR [r8+rbx], r13b
- inc ebx
- inc ecx
- cmp ebx, edx
- jl L_AES_GCM_decrypt_avx2_aesenc_last15_dec_avx_loop
- vmovdqu xmm4, OWORD PTR [rsp+16]
- vpshufb xmm4, xmm4, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm6, xmm6, xmm4
- ; ghash_gfmul_red
- vpclmulqdq xmm2, xmm6, xmm5, 16
- vpclmulqdq xmm1, xmm6, xmm5, 1
- vpclmulqdq xmm0, xmm6, xmm5, 0
- vpxor xmm2, xmm2, xmm1
- vpslldq xmm1, xmm2, 8
- vpsrldq xmm2, xmm2, 8
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm6, xmm6, xmm5, 17
- vpclmulqdq xmm0, xmm1, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm0, xmm1, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm6, xmm6, xmm2
- vpxor xmm6, xmm6, xmm1
- vpxor xmm6, xmm6, xmm0
- L_AES_GCM_decrypt_avx2_done_dec:
- ; calc_tag
- shl r10, 3
- shl r11, 3
- vmovq xmm0, r10
- vmovq xmm1, r11
- vpunpcklqdq xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm6
- ; ghash_gfmul_red
- vpclmulqdq xmm4, xmm0, xmm5, 16
- vpclmulqdq xmm3, xmm0, xmm5, 1
- vpclmulqdq xmm2, xmm0, xmm5, 0
- vpxor xmm4, xmm4, xmm3
- vpslldq xmm3, xmm4, 8
- vpsrldq xmm4, xmm4, 8
- vpxor xmm3, xmm3, xmm2
- vpclmulqdq xmm0, xmm0, xmm5, 17
- vpclmulqdq xmm2, xmm3, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vpshufd xmm3, xmm3, 78
- vpxor xmm3, xmm3, xmm2
- vpclmulqdq xmm2, xmm3, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vpshufd xmm3, xmm3, 78
- vpxor xmm0, xmm0, xmm4
- vpxor xmm0, xmm0, xmm3
- vpxor xmm0, xmm0, xmm2
- vpshufb xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm0, xmm0, xmm15
- ; cmp_tag
- cmp r15d, 16
- je L_AES_GCM_decrypt_avx2_cmp_tag_16
- xor rdx, rdx
- xor rax, rax
- vmovdqu OWORD PTR [rsp], xmm0
- L_AES_GCM_decrypt_avx2_cmp_tag_loop:
- movzx r13d, BYTE PTR [rsp+rdx]
- xor r13b, BYTE PTR [r14+rdx]
- or al, r13b
- inc edx
- cmp edx, r15d
- jne L_AES_GCM_decrypt_avx2_cmp_tag_loop
- cmp rax, 0
- sete al
- jmp L_AES_GCM_decrypt_avx2_cmp_tag_done
- L_AES_GCM_decrypt_avx2_cmp_tag_16:
- vmovdqu xmm1, OWORD PTR [r14]
- vpcmpeqb xmm0, xmm0, xmm1
- vpmovmskb rdx, xmm0
- ; %%edx == 0xFFFF then return 1 else => return 0
- xor eax, eax
- cmp edx, 65535
- sete al
- L_AES_GCM_decrypt_avx2_cmp_tag_done:
- mov DWORD PTR [rbp], eax
- vzeroupper
- vmovdqu xmm6, OWORD PTR [rsp+168]
- vmovdqu xmm7, OWORD PTR [rsp+184]
- vmovdqu xmm8, OWORD PTR [rsp+200]
- vmovdqu xmm9, OWORD PTR [rsp+216]
- vmovdqu xmm10, OWORD PTR [rsp+232]
- vmovdqu xmm11, OWORD PTR [rsp+248]
- vmovdqu xmm12, OWORD PTR [rsp+264]
- vmovdqu xmm13, OWORD PTR [rsp+280]
- vmovdqu xmm14, OWORD PTR [rsp+296]
- vmovdqu xmm15, OWORD PTR [rsp+312]
- add rsp, 328
- pop rbp
- pop rsi
- pop r15
- pop rbx
- pop r14
- pop r12
- pop rdi
- pop r13
- ret
- AES_GCM_decrypt_avx2 ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_init_avx2 PROC
- ; AES-GCM initialisation: computes the hash key H = E_K(0^128), the
- ; initial counter block, and T = E_K(counter0).
- ; Win64 ABI: rcx = AES key schedule, edx = number of AES rounds,
- ; r8 = IV pointer, r9d = IV length in bytes.
- ; Stack args: [rsp+72] -> H output, [rsp+80] -> counter output,
- ; [rsp+88] -> T output. NOTE(review): roles inferred from the stores at
- ; L_AES_GCM_init_avx2_iv_done; confirm against the C prototype.
- push rbx
- push rdi
- push rsi
- push r12
- mov rdi, rcx
- mov rsi, rdx
- mov r10, r8
- mov r11d, r9d
- mov rax, QWORD PTR [rsp+72]
- mov r8, QWORD PTR [rsp+80]
- mov r9, QWORD PTR [rsp+88]
- sub rsp, 48
- ; Win64 requires xmm6-xmm15 be preserved; only xmm6/xmm7 are used here.
- vmovdqu OWORD PTR [rsp+16], xmm6
- vmovdqu OWORD PTR [rsp+32], xmm7
- vpxor xmm4, xmm4, xmm4
- mov edx, r11d
- ; 12-byte IV takes the fast path: counter = IV || 0x00000001.
- cmp edx, 12
- je L_AES_GCM_init_avx2_iv_12
- ; Calculate values when IV is not 12 bytes
- ; H = Encrypt X(=0)
- vmovdqu xmm5, OWORD PTR [rdi]
- vaesenc xmm5, xmm5, [rdi+16]
- vaesenc xmm5, xmm5, [rdi+32]
- vaesenc xmm5, xmm5, [rdi+48]
- vaesenc xmm5, xmm5, [rdi+64]
- vaesenc xmm5, xmm5, [rdi+80]
- vaesenc xmm5, xmm5, [rdi+96]
- vaesenc xmm5, xmm5, [rdi+112]
- vaesenc xmm5, xmm5, [rdi+128]
- vaesenc xmm5, xmm5, [rdi+144]
- ; esi = rounds: 10 (AES-128) falls through to last; 11+ means 192/256.
- cmp esi, 11
- vmovdqu xmm0, OWORD PTR [rdi+160]
- jl L_AES_GCM_init_avx2_calc_iv_1_aesenc_avx_last
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm5, xmm5, [rdi+176]
- cmp esi, 13
- vmovdqu xmm0, OWORD PTR [rdi+192]
- jl L_AES_GCM_init_avx2_calc_iv_1_aesenc_avx_last
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm5, xmm5, [rdi+208]
- vmovdqu xmm0, OWORD PTR [rdi+224]
- L_AES_GCM_init_avx2_calc_iv_1_aesenc_avx_last:
- vaesenclast xmm5, xmm5, xmm0
- ; Byte-swap H into GHASH (big-endian) representation.
- vpshufb xmm5, xmm5, OWORD PTR L_avx2_aes_gcm_bswap_mask
- ; Calc counter
- ; Initialization vector
- ; Non-12-byte IV: counter0 = GHASH_H(IV) per NIST SP 800-38D.
- cmp edx, 0
- mov rcx, 0
- je L_AES_GCM_init_avx2_calc_iv_done
- cmp edx, 16
- jl L_AES_GCM_init_avx2_calc_iv_lt16
- ; 4294967280 = 0xFFFFFFF0: round length down to a multiple of 16.
- and edx, 4294967280
- L_AES_GCM_init_avx2_calc_iv_16_loop:
- vmovdqu xmm0, OWORD PTR [r10+rcx]
- vpshufb xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm4, xmm4, xmm0
- ; ghash_gfmul_avx
- ; 128x128 carry-less multiply (karatsuba-style cross terms 16/1).
- vpclmulqdq xmm2, xmm5, xmm4, 16
- vpclmulqdq xmm1, xmm5, xmm4, 1
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpxor xmm2, xmm2, xmm1
- vpslldq xmm1, xmm2, 8
- vpsrldq xmm2, xmm2, 8
- vpxor xmm6, xmm0, xmm1
- vpxor xmm4, xmm3, xmm2
- ; ghash_mid
- ; Shift the 256-bit product left by one bit across xmm6:xmm4.
- vpsrld xmm0, xmm6, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm6, xmm6, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm6, xmm6, xmm0
- vpor xmm4, xmm4, xmm1
- ; ghash_red
- ; Reduce modulo the GCM polynomial x^128 + x^7 + x^2 + x + 1.
- vmovdqu xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpclmulqdq xmm0, xmm6, xmm2, 16
- vpshufd xmm1, xmm6, 78
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm0, xmm1, xmm2, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm1, xmm1, xmm0
- vpxor xmm4, xmm4, xmm1
- add ecx, 16
- cmp ecx, edx
- jl L_AES_GCM_init_avx2_calc_iv_16_loop
- mov edx, r11d
- cmp ecx, edx
- je L_AES_GCM_init_avx2_calc_iv_done
- L_AES_GCM_init_avx2_calc_iv_lt16:
- ; Partial IV block: copy remaining bytes into a zero-padded 16-byte
- ; buffer on the stack, then GHASH it like a full block.
- vpxor xmm0, xmm0, xmm0
- xor ebx, ebx
- vmovdqu OWORD PTR [rsp], xmm0
- L_AES_GCM_init_avx2_calc_iv_loop:
- movzx r12d, BYTE PTR [r10+rcx]
- mov BYTE PTR [rsp+rbx], r12b
- inc ecx
- inc ebx
- cmp ecx, edx
- jl L_AES_GCM_init_avx2_calc_iv_loop
- vmovdqu xmm0, OWORD PTR [rsp]
- vpshufb xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm4, xmm4, xmm0
- ; ghash_gfmul_avx
- vpclmulqdq xmm2, xmm5, xmm4, 16
- vpclmulqdq xmm1, xmm5, xmm4, 1
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpxor xmm2, xmm2, xmm1
- vpslldq xmm1, xmm2, 8
- vpsrldq xmm2, xmm2, 8
- vpxor xmm6, xmm0, xmm1
- vpxor xmm4, xmm3, xmm2
- ; ghash_mid
- vpsrld xmm0, xmm6, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm6, xmm6, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm6, xmm6, xmm0
- vpor xmm4, xmm4, xmm1
- ; ghash_red
- vmovdqu xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpclmulqdq xmm0, xmm6, xmm2, 16
- vpshufd xmm1, xmm6, 78
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm0, xmm1, xmm2, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm1, xmm1, xmm0
- vpxor xmm4, xmm4, xmm1
- L_AES_GCM_init_avx2_calc_iv_done:
- ; T = Encrypt counter
- ; Fold in the IV bit-length (len << 3) as the final GHASH block.
- vpxor xmm0, xmm0, xmm0
- shl edx, 3
- vmovq xmm0, rdx
- vpxor xmm4, xmm4, xmm0
- ; ghash_gfmul_avx
- vpclmulqdq xmm2, xmm5, xmm4, 16
- vpclmulqdq xmm1, xmm5, xmm4, 1
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpxor xmm2, xmm2, xmm1
- vpslldq xmm1, xmm2, 8
- vpsrldq xmm2, xmm2, 8
- vpxor xmm6, xmm0, xmm1
- vpxor xmm4, xmm3, xmm2
- ; ghash_mid
- vpsrld xmm0, xmm6, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm6, xmm6, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm6, xmm6, xmm0
- vpor xmm4, xmm4, xmm1
- ; ghash_red
- vmovdqu xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpclmulqdq xmm0, xmm6, xmm2, 16
- vpshufd xmm1, xmm6, 78
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm0, xmm1, xmm2, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm1, xmm1, xmm0
- vpxor xmm4, xmm4, xmm1
- ; Back to byte order for use as the AES counter input.
- vpshufb xmm4, xmm4, OWORD PTR L_avx2_aes_gcm_bswap_mask
- ; Encrypt counter
- vmovdqu xmm7, OWORD PTR [rdi]
- vpxor xmm7, xmm7, xmm4
- vaesenc xmm7, xmm7, [rdi+16]
- vaesenc xmm7, xmm7, [rdi+32]
- vaesenc xmm7, xmm7, [rdi+48]
- vaesenc xmm7, xmm7, [rdi+64]
- vaesenc xmm7, xmm7, [rdi+80]
- vaesenc xmm7, xmm7, [rdi+96]
- vaesenc xmm7, xmm7, [rdi+112]
- vaesenc xmm7, xmm7, [rdi+128]
- vaesenc xmm7, xmm7, [rdi+144]
- cmp esi, 11
- vmovdqu xmm0, OWORD PTR [rdi+160]
- jl L_AES_GCM_init_avx2_calc_iv_2_aesenc_avx_last
- vaesenc xmm7, xmm7, xmm0
- vaesenc xmm7, xmm7, [rdi+176]
- cmp esi, 13
- vmovdqu xmm0, OWORD PTR [rdi+192]
- jl L_AES_GCM_init_avx2_calc_iv_2_aesenc_avx_last
- vaesenc xmm7, xmm7, xmm0
- vaesenc xmm7, xmm7, [rdi+208]
- vmovdqu xmm0, OWORD PTR [rdi+224]
- L_AES_GCM_init_avx2_calc_iv_2_aesenc_avx_last:
- vaesenclast xmm7, xmm7, xmm0
- jmp L_AES_GCM_init_avx2_iv_done
- L_AES_GCM_init_avx2_iv_12:
- ; # Calculate values when IV is 12 bytes
- ; Set counter based on IV
- ; vpblendd with mask 7 keeps the low 12 IV bytes and the 0x...01 word
- ; from L_avx2_aes_gcm_bswap_one.
- vmovdqu xmm4, OWORD PTR L_avx2_aes_gcm_bswap_one
- vmovdqu xmm5, OWORD PTR [rdi]
- vpblendd xmm4, xmm4, [r10], 7
- ; H = Encrypt X(=0) and T = Encrypt counter
- ; Interleave the two AES pipelines (xmm5 = H, xmm7 = T) for throughput.
- vmovdqu xmm6, OWORD PTR [rdi+16]
- vpxor xmm7, xmm4, xmm5
- vaesenc xmm5, xmm5, xmm6
- vaesenc xmm7, xmm7, xmm6
- vmovdqu xmm0, OWORD PTR [rdi+32]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm7, xmm7, xmm0
- vmovdqu xmm0, OWORD PTR [rdi+48]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm7, xmm7, xmm0
- vmovdqu xmm0, OWORD PTR [rdi+64]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm7, xmm7, xmm0
- vmovdqu xmm0, OWORD PTR [rdi+80]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm7, xmm7, xmm0
- vmovdqu xmm0, OWORD PTR [rdi+96]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm7, xmm7, xmm0
- vmovdqu xmm0, OWORD PTR [rdi+112]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm7, xmm7, xmm0
- vmovdqu xmm0, OWORD PTR [rdi+128]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm7, xmm7, xmm0
- vmovdqu xmm0, OWORD PTR [rdi+144]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm7, xmm7, xmm0
- cmp esi, 11
- vmovdqu xmm0, OWORD PTR [rdi+160]
- jl L_AES_GCM_init_avx2_calc_iv_12_last
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm7, xmm7, xmm0
- vmovdqu xmm0, OWORD PTR [rdi+176]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm7, xmm7, xmm0
- cmp esi, 13
- vmovdqu xmm0, OWORD PTR [rdi+192]
- jl L_AES_GCM_init_avx2_calc_iv_12_last
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm7, xmm7, xmm0
- vmovdqu xmm0, OWORD PTR [rdi+208]
- vaesenc xmm5, xmm5, xmm0
- vaesenc xmm7, xmm7, xmm0
- vmovdqu xmm0, OWORD PTR [rdi+224]
- L_AES_GCM_init_avx2_calc_iv_12_last:
- vaesenclast xmm5, xmm5, xmm0
- vaesenclast xmm7, xmm7, xmm0
- vpshufb xmm5, xmm5, OWORD PTR L_avx2_aes_gcm_bswap_mask
- L_AES_GCM_init_avx2_iv_done:
- ; Store outputs: T -> [r9], H -> [rax], incremented counter -> [r8].
- vmovdqu OWORD PTR [r9], xmm7
- vpshufb xmm4, xmm4, OWORD PTR L_avx2_aes_gcm_bswap_epi64
- vpaddd xmm4, xmm4, OWORD PTR L_avx2_aes_gcm_one
- vmovdqu OWORD PTR [rax], xmm5
- vmovdqu OWORD PTR [r8], xmm4
- vzeroupper
- vmovdqu xmm6, OWORD PTR [rsp+16]
- vmovdqu xmm7, OWORD PTR [rsp+32]
- add rsp, 48
- pop r12
- pop rsi
- pop rdi
- pop rbx
- ret
- AES_GCM_init_avx2 ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_aad_update_avx2 PROC
- ; Fold AAD (additional authenticated data) into the GHASH state.
- ; Win64 ABI: rcx = AAD pointer, edx = AAD length in bytes,
- ; r8 = GHASH state X (in/out, 16 bytes), r9 = hash key H.
- ; NOTE(review): the loop is do-while with a 16-byte stride, so it runs
- ; at least once and handles no partial block -- the caller must pass a
- ; non-zero multiple of 16; confirm at call sites.
- mov rax, rcx
- sub rsp, 16
- ; Win64: xmm6 is callee-saved and used as a scratch register below.
- vmovdqu OWORD PTR [rsp], xmm6
- vmovdqu xmm4, OWORD PTR [r8]
- vmovdqu xmm5, OWORD PTR [r9]
- xor ecx, ecx
- L_AES_GCM_aad_update_avx2_16_loop:
- vmovdqu xmm0, OWORD PTR [rax+rcx]
- vpshufb xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm4, xmm4, xmm0
- ; ghash_gfmul_avx
- ; X = (X ^ block) * H: 128x128 carry-less multiply.
- vpclmulqdq xmm2, xmm5, xmm4, 16
- vpclmulqdq xmm1, xmm5, xmm4, 1
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpxor xmm2, xmm2, xmm1
- vpslldq xmm1, xmm2, 8
- vpsrldq xmm2, xmm2, 8
- vpxor xmm6, xmm0, xmm1
- vpxor xmm4, xmm3, xmm2
- ; ghash_mid
- ; Shift the 256-bit product left by one bit across xmm6:xmm4.
- vpsrld xmm0, xmm6, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm6, xmm6, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm6, xmm6, xmm0
- vpor xmm4, xmm4, xmm1
- ; ghash_red
- ; Reduce modulo the GCM polynomial x^128 + x^7 + x^2 + x + 1.
- vmovdqu xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpclmulqdq xmm0, xmm6, xmm2, 16
- vpshufd xmm1, xmm6, 78
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm0, xmm1, xmm2, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm1, xmm1, xmm0
- vpxor xmm4, xmm4, xmm1
- add ecx, 16
- cmp ecx, edx
- jl L_AES_GCM_aad_update_avx2_16_loop
- ; Write the updated GHASH state back to the caller.
- vmovdqu OWORD PTR [r8], xmm4
- vzeroupper
- vmovdqu xmm6, OWORD PTR [rsp]
- add rsp, 16
- ret
- AES_GCM_aad_update_avx2 ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_encrypt_block_avx2 PROC
- ; Encrypt one 16-byte block in CTR mode and advance the counter.
- ; Win64 ABI: rcx = AES key schedule, edx = number of rounds,
- ; r8 = output pointer, r9 = input pointer, [rsp+40] = counter (in/out).
- ; GHASH is not updated here; the caller must hash the ciphertext.
- mov r10, r8
- mov r11, r9
- mov rax, QWORD PTR [rsp+40]
- ; NOTE(review): 152 bytes are reserved but no [rsp] access appears in
- ; this body -- likely kept for frame-layout consistency with siblings.
- sub rsp, 152
- vmovdqu xmm3, OWORD PTR [rax]
- ; aesenc_block
- vmovdqu xmm1, xmm3
- ; Byte-swap the counter for encryption; increment the saved copy.
- vpshufb xmm0, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_epi64
- vpaddd xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_one
- vpxor xmm0, xmm0, [rcx]
- vmovdqu xmm2, OWORD PTR [rcx+16]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rcx+32]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rcx+48]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rcx+64]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rcx+80]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rcx+96]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rcx+112]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rcx+128]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rcx+144]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm3, xmm1
- ; edx = rounds: 10 (AES-128) falls through; 11+ means AES-192/256.
- cmp edx, 11
- vmovdqu xmm1, OWORD PTR [rcx+160]
- jl L_AES_GCM_encrypt_block_avx2_aesenc_block_last
- vaesenc xmm0, xmm0, xmm1
- vmovdqu xmm2, OWORD PTR [rcx+176]
- vaesenc xmm0, xmm0, xmm2
- cmp edx, 13
- vmovdqu xmm1, OWORD PTR [rcx+192]
- jl L_AES_GCM_encrypt_block_avx2_aesenc_block_last
- vaesenc xmm0, xmm0, xmm1
- vmovdqu xmm2, OWORD PTR [rcx+208]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm1, OWORD PTR [rcx+224]
- L_AES_GCM_encrypt_block_avx2_aesenc_block_last:
- vaesenclast xmm0, xmm0, xmm1
- ; ciphertext = keystream ^ plaintext; store block and new counter.
- vmovdqu xmm1, OWORD PTR [r11]
- vpxor xmm0, xmm0, xmm1
- vmovdqu OWORD PTR [r10], xmm0
- vmovdqu OWORD PTR [rax], xmm3
- vzeroupper
- add rsp, 152
- ret
- AES_GCM_encrypt_block_avx2 ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_ghash_block_avx2 PROC
- ; Fold a single 16-byte block into the GHASH state: X = (X ^ blk) * H.
- ; Win64 ABI: rcx = block pointer, rdx = GHASH state X (in/out),
- ; r8 = hash key H.
- sub rsp, 16
- ; Win64: xmm6 is callee-saved and used as a scratch register below.
- vmovdqu OWORD PTR [rsp], xmm6
- vmovdqu xmm4, OWORD PTR [rdx]
- vmovdqu xmm5, OWORD PTR [r8]
- vmovdqu xmm0, OWORD PTR [rcx]
- ; Byte-swap the block into GHASH (big-endian) order and mix into X.
- vpshufb xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm4, xmm4, xmm0
- ; ghash_gfmul_avx
- ; 128x128 carry-less multiply of H and X.
- vpclmulqdq xmm2, xmm5, xmm4, 16
- vpclmulqdq xmm1, xmm5, xmm4, 1
- vpclmulqdq xmm0, xmm5, xmm4, 0
- vpclmulqdq xmm3, xmm5, xmm4, 17
- vpxor xmm2, xmm2, xmm1
- vpslldq xmm1, xmm2, 8
- vpsrldq xmm2, xmm2, 8
- vpxor xmm6, xmm0, xmm1
- vpxor xmm4, xmm3, xmm2
- ; ghash_mid
- ; Shift the 256-bit product left by one bit across xmm6:xmm4.
- vpsrld xmm0, xmm6, 31
- vpsrld xmm1, xmm4, 31
- vpslld xmm6, xmm6, 1
- vpslld xmm4, xmm4, 1
- vpsrldq xmm2, xmm0, 12
- vpslldq xmm0, xmm0, 4
- vpslldq xmm1, xmm1, 4
- vpor xmm4, xmm4, xmm2
- vpor xmm6, xmm6, xmm0
- vpor xmm4, xmm4, xmm1
- ; ghash_red
- ; Reduce modulo the GCM polynomial x^128 + x^7 + x^2 + x + 1.
- vmovdqu xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpclmulqdq xmm0, xmm6, xmm2, 16
- vpshufd xmm1, xmm6, 78
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm0, xmm1, xmm2, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm1, xmm1, xmm0
- vpxor xmm4, xmm4, xmm1
- ; Write the updated GHASH state back to the caller.
- vmovdqu OWORD PTR [rdx], xmm4
- vzeroupper
- vmovdqu xmm6, OWORD PTR [rsp]
- add rsp, 16
- ret
- AES_GCM_ghash_block_avx2 ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_encrypt_update_avx2 PROC
- push r12
- push r13
- push r14
- push r15
- push rdi
- mov rax, rcx
- mov r10, r8
- mov r8d, edx
- mov r11, r9
- mov r9d, DWORD PTR [rsp+80]
- mov r12, QWORD PTR [rsp+88]
- mov r13, QWORD PTR [rsp+96]
- mov r14, QWORD PTR [rsp+104]
- sub rsp, 312
- vmovdqu OWORD PTR [rsp+152], xmm6
- vmovdqu OWORD PTR [rsp+168], xmm7
- vmovdqu OWORD PTR [rsp+184], xmm8
- vmovdqu OWORD PTR [rsp+200], xmm9
- vmovdqu OWORD PTR [rsp+216], xmm10
- vmovdqu OWORD PTR [rsp+232], xmm11
- vmovdqu OWORD PTR [rsp+248], xmm12
- vmovdqu OWORD PTR [rsp+264], xmm13
- vmovdqu OWORD PTR [rsp+280], xmm14
- vmovdqu OWORD PTR [rsp+296], xmm15
- vmovdqu xmm6, OWORD PTR [r12]
- vmovdqu xmm5, OWORD PTR [r13]
- vmovdqu xmm4, OWORD PTR [r14]
- vpsrlq xmm1, xmm5, 63
- vpsllq xmm0, xmm5, 1
- vpslldq xmm1, xmm1, 8
- vpor xmm0, xmm0, xmm1
- vpshufd xmm5, xmm5, 255
- vpsrad xmm5, xmm5, 31
- vpand xmm5, xmm5, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpxor xmm5, xmm5, xmm0
- xor edi, edi
- cmp r9d, 128
- mov r15d, r9d
- jl L_AES_GCM_encrypt_update_avx2_done_128
- and r15d, 4294967168
- vmovdqu OWORD PTR [rsp+128], xmm4
- vmovdqu xmm3, OWORD PTR L_avx2_aes_gcm_mod2_128
- ; H ^ 1 and H ^ 2
- vpclmulqdq xmm9, xmm5, xmm5, 0
- vpclmulqdq xmm10, xmm5, xmm5, 17
- vpclmulqdq xmm8, xmm9, xmm3, 16
- vpshufd xmm9, xmm9, 78
- vpxor xmm9, xmm9, xmm8
- vpclmulqdq xmm8, xmm9, xmm3, 16
- vpshufd xmm9, xmm9, 78
- vpxor xmm9, xmm9, xmm8
- vpxor xmm0, xmm10, xmm9
- vmovdqu OWORD PTR [rsp], xmm5
- vmovdqu OWORD PTR [rsp+16], xmm0
- ; H ^ 3 and H ^ 4
- vpclmulqdq xmm11, xmm0, xmm5, 16
- vpclmulqdq xmm10, xmm0, xmm5, 1
- vpclmulqdq xmm9, xmm0, xmm5, 0
- vpclmulqdq xmm12, xmm0, xmm5, 17
- vpclmulqdq xmm13, xmm0, xmm0, 0
- vpclmulqdq xmm14, xmm0, xmm0, 17
- vpxor xmm11, xmm11, xmm10
- vpslldq xmm10, xmm11, 8
- vpsrldq xmm11, xmm11, 8
- vpxor xmm10, xmm10, xmm9
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm10, xmm10, xmm9
- vpxor xmm13, xmm13, xmm8
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm12, xmm12, xmm11
- vpxor xmm13, xmm13, xmm8
- vpxor xmm10, xmm10, xmm12
- vpxor xmm2, xmm13, xmm14
- vpxor xmm1, xmm10, xmm9
- vmovdqu OWORD PTR [rsp+32], xmm1
- vmovdqu OWORD PTR [rsp+48], xmm2
- ; H ^ 5 and H ^ 6
- vpclmulqdq xmm11, xmm1, xmm0, 16
- vpclmulqdq xmm10, xmm1, xmm0, 1
- vpclmulqdq xmm9, xmm1, xmm0, 0
- vpclmulqdq xmm12, xmm1, xmm0, 17
- vpclmulqdq xmm13, xmm1, xmm1, 0
- vpclmulqdq xmm14, xmm1, xmm1, 17
- vpxor xmm11, xmm11, xmm10
- vpslldq xmm10, xmm11, 8
- vpsrldq xmm11, xmm11, 8
- vpxor xmm10, xmm10, xmm9
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm10, xmm10, xmm9
- vpxor xmm13, xmm13, xmm8
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm12, xmm12, xmm11
- vpxor xmm13, xmm13, xmm8
- vpxor xmm10, xmm10, xmm12
- vpxor xmm0, xmm13, xmm14
- vpxor xmm7, xmm10, xmm9
- vmovdqu OWORD PTR [rsp+64], xmm7
- vmovdqu OWORD PTR [rsp+80], xmm0
- ; H ^ 7 and H ^ 8
- vpclmulqdq xmm11, xmm2, xmm1, 16
- vpclmulqdq xmm10, xmm2, xmm1, 1
- vpclmulqdq xmm9, xmm2, xmm1, 0
- vpclmulqdq xmm12, xmm2, xmm1, 17
- vpclmulqdq xmm13, xmm2, xmm2, 0
- vpclmulqdq xmm14, xmm2, xmm2, 17
- vpxor xmm11, xmm11, xmm10
- vpslldq xmm10, xmm11, 8
- vpsrldq xmm11, xmm11, 8
- vpxor xmm10, xmm10, xmm9
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm10, xmm10, xmm9
- vpxor xmm13, xmm13, xmm8
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm12, xmm12, xmm11
- vpxor xmm13, xmm13, xmm8
- vpxor xmm10, xmm10, xmm12
- vpxor xmm0, xmm13, xmm14
- vpxor xmm7, xmm10, xmm9
- vmovdqu OWORD PTR [rsp+96], xmm7
- vmovdqu OWORD PTR [rsp+112], xmm0
- ; First 128 bytes of input
- ; aesenc_128
- ; aesenc_ctr
- vmovdqu xmm0, OWORD PTR [rsp+128]
- vmovdqu xmm1, OWORD PTR L_avx2_aes_gcm_bswap_epi64
- vpaddd xmm9, xmm0, OWORD PTR L_avx2_aes_gcm_one
- vpshufb xmm8, xmm0, xmm1
- vpaddd xmm10, xmm0, OWORD PTR L_avx2_aes_gcm_two
- vpshufb xmm9, xmm9, xmm1
- vpaddd xmm11, xmm0, OWORD PTR L_avx2_aes_gcm_three
- vpshufb xmm10, xmm10, xmm1
- vpaddd xmm12, xmm0, OWORD PTR L_avx2_aes_gcm_four
- vpshufb xmm11, xmm11, xmm1
- vpaddd xmm13, xmm0, OWORD PTR L_avx2_aes_gcm_five
- vpshufb xmm12, xmm12, xmm1
- vpaddd xmm14, xmm0, OWORD PTR L_avx2_aes_gcm_six
- vpshufb xmm13, xmm13, xmm1
- vpaddd xmm15, xmm0, OWORD PTR L_avx2_aes_gcm_seven
- vpshufb xmm14, xmm14, xmm1
- vpaddd xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_eight
- vpshufb xmm15, xmm15, xmm1
- ; aesenc_xor
- vmovdqu xmm7, OWORD PTR [rax]
- vmovdqu OWORD PTR [rsp+128], xmm0
- vpxor xmm8, xmm8, xmm7
- vpxor xmm9, xmm9, xmm7
- vpxor xmm10, xmm10, xmm7
- vpxor xmm11, xmm11, xmm7
- vpxor xmm12, xmm12, xmm7
- vpxor xmm13, xmm13, xmm7
- vpxor xmm14, xmm14, xmm7
- vpxor xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rax+16]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rax+32]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rax+48]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rax+64]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rax+80]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rax+96]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rax+112]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rax+128]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rax+144]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- cmp r8d, 11
- vmovdqu xmm7, OWORD PTR [rax+160]
- jl L_AES_GCM_encrypt_update_avx2_aesenc_128_enc_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rax+176]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- cmp r8d, 13
- vmovdqu xmm7, OWORD PTR [rax+192]
- jl L_AES_GCM_encrypt_update_avx2_aesenc_128_enc_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rax+208]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rax+224]
- L_AES_GCM_encrypt_update_avx2_aesenc_128_enc_done:
- ; aesenc_last
- vaesenclast xmm8, xmm8, xmm7
- vaesenclast xmm9, xmm9, xmm7
- vaesenclast xmm10, xmm10, xmm7
- vaesenclast xmm11, xmm11, xmm7
- vmovdqu xmm0, OWORD PTR [r11]
- vmovdqu xmm1, OWORD PTR [r11+16]
- vmovdqu xmm2, OWORD PTR [r11+32]
- vmovdqu xmm3, OWORD PTR [r11+48]
- vpxor xmm8, xmm8, xmm0
- vpxor xmm9, xmm9, xmm1
- vpxor xmm10, xmm10, xmm2
- vpxor xmm11, xmm11, xmm3
- vmovdqu OWORD PTR [r10], xmm8
- vmovdqu OWORD PTR [r10+16], xmm9
- vmovdqu OWORD PTR [r10+32], xmm10
- vmovdqu OWORD PTR [r10+48], xmm11
- vaesenclast xmm12, xmm12, xmm7
- vaesenclast xmm13, xmm13, xmm7
- vaesenclast xmm14, xmm14, xmm7
- vaesenclast xmm15, xmm15, xmm7
- vmovdqu xmm0, OWORD PTR [r11+64]
- vmovdqu xmm1, OWORD PTR [r11+80]
- vmovdqu xmm2, OWORD PTR [r11+96]
- vmovdqu xmm3, OWORD PTR [r11+112]
- vpxor xmm12, xmm12, xmm0
- vpxor xmm13, xmm13, xmm1
- vpxor xmm14, xmm14, xmm2
- vpxor xmm15, xmm15, xmm3
- vmovdqu OWORD PTR [r10+64], xmm12
- vmovdqu OWORD PTR [r10+80], xmm13
- vmovdqu OWORD PTR [r10+96], xmm14
- vmovdqu OWORD PTR [r10+112], xmm15
- cmp r15d, 128
- mov edi, 128
- jle L_AES_GCM_encrypt_update_avx2_end_128
- ; More 128 bytes of input
- L_AES_GCM_encrypt_update_avx2_ghash_128:
- ; aesenc_128_ghash
- lea rcx, QWORD PTR [r11+rdi]
- lea rdx, QWORD PTR [r10+rdi]
- ; aesenc_ctr
- vmovdqu xmm0, OWORD PTR [rsp+128]
- vmovdqu xmm1, OWORD PTR L_avx2_aes_gcm_bswap_epi64
- vpaddd xmm9, xmm0, OWORD PTR L_avx2_aes_gcm_one
- vpshufb xmm8, xmm0, xmm1
- vpaddd xmm10, xmm0, OWORD PTR L_avx2_aes_gcm_two
- vpshufb xmm9, xmm9, xmm1
- vpaddd xmm11, xmm0, OWORD PTR L_avx2_aes_gcm_three
- vpshufb xmm10, xmm10, xmm1
- vpaddd xmm12, xmm0, OWORD PTR L_avx2_aes_gcm_four
- vpshufb xmm11, xmm11, xmm1
- vpaddd xmm13, xmm0, OWORD PTR L_avx2_aes_gcm_five
- vpshufb xmm12, xmm12, xmm1
- vpaddd xmm14, xmm0, OWORD PTR L_avx2_aes_gcm_six
- vpshufb xmm13, xmm13, xmm1
- vpaddd xmm15, xmm0, OWORD PTR L_avx2_aes_gcm_seven
- vpshufb xmm14, xmm14, xmm1
- vpaddd xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_eight
- vpshufb xmm15, xmm15, xmm1
- ; aesenc_xor
- vmovdqu xmm7, OWORD PTR [rax]
- vmovdqu OWORD PTR [rsp+128], xmm0
- vpxor xmm8, xmm8, xmm7
- vpxor xmm9, xmm9, xmm7
- vpxor xmm10, xmm10, xmm7
- vpxor xmm11, xmm11, xmm7
- vpxor xmm12, xmm12, xmm7
- vpxor xmm13, xmm13, xmm7
- vpxor xmm14, xmm14, xmm7
- vpxor xmm15, xmm15, xmm7
- ; aesenc_pclmul_1
- vmovdqu xmm1, OWORD PTR [rdx+-128]
- vmovdqu xmm0, OWORD PTR [rax+16]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vmovdqu xmm2, OWORD PTR [rsp+112]
- vpxor xmm1, xmm1, xmm6
- vpclmulqdq xmm5, xmm1, xmm2, 16
- vpclmulqdq xmm3, xmm1, xmm2, 1
- vpclmulqdq xmm6, xmm1, xmm2, 0
- vpclmulqdq xmm7, xmm1, xmm2, 17
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_2
- vmovdqu xmm1, OWORD PTR [rdx+-112]
- vmovdqu xmm0, OWORD PTR [rsp+96]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rax+32]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rdx+-96]
- vmovdqu xmm0, OWORD PTR [rsp+80]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rax+48]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rdx+-80]
- vmovdqu xmm0, OWORD PTR [rsp+64]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rax+64]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rdx+-64]
- vmovdqu xmm0, OWORD PTR [rsp+48]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rax+80]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rdx+-48]
- vmovdqu xmm0, OWORD PTR [rsp+32]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rax+96]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rdx+-32]
- vmovdqu xmm0, OWORD PTR [rsp+16]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rax+112]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rdx+-16]
- vmovdqu xmm0, OWORD PTR [rsp]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rax+128]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_l
- vpxor xmm5, xmm5, xmm2
- vpxor xmm6, xmm6, xmm4
- vpxor xmm5, xmm5, xmm3
- vpslldq xmm1, xmm5, 8
- vpsrldq xmm5, xmm5, 8
- vmovdqu xmm4, OWORD PTR [rax+144]
- vmovdqu xmm0, OWORD PTR L_avx2_aes_gcm_mod2_128
- vaesenc xmm8, xmm8, xmm4
- vpxor xmm6, xmm6, xmm1
- vpxor xmm7, xmm7, xmm5
- vpclmulqdq xmm3, xmm6, xmm0, 16
- vaesenc xmm9, xmm9, xmm4
- vaesenc xmm10, xmm10, xmm4
- vaesenc xmm11, xmm11, xmm4
- vpshufd xmm6, xmm6, 78
- vpxor xmm6, xmm6, xmm3
- vpclmulqdq xmm3, xmm6, xmm0, 16
- vaesenc xmm12, xmm12, xmm4
- vaesenc xmm13, xmm13, xmm4
- vaesenc xmm14, xmm14, xmm4
- vpshufd xmm6, xmm6, 78
- vpxor xmm6, xmm6, xmm3
- vpxor xmm6, xmm6, xmm7
- vaesenc xmm15, xmm15, xmm4
- cmp r8d, 11
- vmovdqu xmm7, OWORD PTR [rax+160]
- jl L_AES_GCM_encrypt_update_avx2_aesenc_128_ghash_avx_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rax+176]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- cmp r8d, 13
- vmovdqu xmm7, OWORD PTR [rax+192]
- jl L_AES_GCM_encrypt_update_avx2_aesenc_128_ghash_avx_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rax+208]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rax+224]
- L_AES_GCM_encrypt_update_avx2_aesenc_128_ghash_avx_done:
- ; aesenc_last
- vaesenclast xmm8, xmm8, xmm7
- vaesenclast xmm9, xmm9, xmm7
- vaesenclast xmm10, xmm10, xmm7
- vaesenclast xmm11, xmm11, xmm7
- vmovdqu xmm0, OWORD PTR [rcx]
- vmovdqu xmm1, OWORD PTR [rcx+16]
- vmovdqu xmm2, OWORD PTR [rcx+32]
- vmovdqu xmm3, OWORD PTR [rcx+48]
- vpxor xmm8, xmm8, xmm0
- vpxor xmm9, xmm9, xmm1
- vpxor xmm10, xmm10, xmm2
- vpxor xmm11, xmm11, xmm3
- vmovdqu OWORD PTR [rdx], xmm8
- vmovdqu OWORD PTR [rdx+16], xmm9
- vmovdqu OWORD PTR [rdx+32], xmm10
- vmovdqu OWORD PTR [rdx+48], xmm11
- vaesenclast xmm12, xmm12, xmm7
- vaesenclast xmm13, xmm13, xmm7
- vaesenclast xmm14, xmm14, xmm7
- vaesenclast xmm15, xmm15, xmm7
- vmovdqu xmm0, OWORD PTR [rcx+64]
- vmovdqu xmm1, OWORD PTR [rcx+80]
- vmovdqu xmm2, OWORD PTR [rcx+96]
- vmovdqu xmm3, OWORD PTR [rcx+112]
- vpxor xmm12, xmm12, xmm0
- vpxor xmm13, xmm13, xmm1
- vpxor xmm14, xmm14, xmm2
- vpxor xmm15, xmm15, xmm3
- vmovdqu OWORD PTR [rdx+64], xmm12
- vmovdqu OWORD PTR [rdx+80], xmm13
- vmovdqu OWORD PTR [rdx+96], xmm14
- vmovdqu OWORD PTR [rdx+112], xmm15
- ; aesenc_128_ghash - end
- add edi, 128
- cmp edi, r15d
- jl L_AES_GCM_encrypt_update_avx2_ghash_128
- L_AES_GCM_encrypt_update_avx2_end_128:
- vmovdqu xmm4, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpshufb xmm8, xmm8, xmm4
- vpshufb xmm9, xmm9, xmm4
- vpshufb xmm10, xmm10, xmm4
- vpshufb xmm11, xmm11, xmm4
- vpshufb xmm12, xmm12, xmm4
- vpshufb xmm13, xmm13, xmm4
- vpshufb xmm14, xmm14, xmm4
- vpshufb xmm15, xmm15, xmm4
- vpxor xmm8, xmm8, xmm6
- vmovdqu xmm7, OWORD PTR [rsp]
- vpclmulqdq xmm5, xmm7, xmm15, 16
- vpclmulqdq xmm1, xmm7, xmm15, 1
- vpclmulqdq xmm4, xmm7, xmm15, 0
- vpclmulqdq xmm6, xmm7, xmm15, 17
- vpxor xmm5, xmm5, xmm1
- vmovdqu xmm7, OWORD PTR [rsp+16]
- vpclmulqdq xmm2, xmm7, xmm14, 16
- vpclmulqdq xmm1, xmm7, xmm14, 1
- vpclmulqdq xmm0, xmm7, xmm14, 0
- vpclmulqdq xmm3, xmm7, xmm14, 17
- vpxor xmm2, xmm2, xmm1
- vpxor xmm6, xmm6, xmm3
- vpxor xmm5, xmm5, xmm2
- vpxor xmm4, xmm4, xmm0
- vmovdqu xmm15, OWORD PTR [rsp+32]
- vmovdqu xmm7, OWORD PTR [rsp+48]
- vpclmulqdq xmm2, xmm15, xmm13, 16
- vpclmulqdq xmm1, xmm15, xmm13, 1
- vpclmulqdq xmm0, xmm15, xmm13, 0
- vpclmulqdq xmm3, xmm15, xmm13, 17
- vpxor xmm2, xmm2, xmm1
- vpxor xmm6, xmm6, xmm3
- vpxor xmm5, xmm5, xmm2
- vpxor xmm4, xmm4, xmm0
- vpclmulqdq xmm2, xmm7, xmm12, 16
- vpclmulqdq xmm1, xmm7, xmm12, 1
- vpclmulqdq xmm0, xmm7, xmm12, 0
- vpclmulqdq xmm3, xmm7, xmm12, 17
- vpxor xmm2, xmm2, xmm1
- vpxor xmm6, xmm6, xmm3
- vpxor xmm5, xmm5, xmm2
- vpxor xmm4, xmm4, xmm0
- vmovdqu xmm15, OWORD PTR [rsp+64]
- vmovdqu xmm7, OWORD PTR [rsp+80]
- vpclmulqdq xmm2, xmm15, xmm11, 16
- vpclmulqdq xmm1, xmm15, xmm11, 1
- vpclmulqdq xmm0, xmm15, xmm11, 0
- vpclmulqdq xmm3, xmm15, xmm11, 17
- vpxor xmm2, xmm2, xmm1
- vpxor xmm6, xmm6, xmm3
- vpxor xmm5, xmm5, xmm2
- vpxor xmm4, xmm4, xmm0
- vpclmulqdq xmm2, xmm7, xmm10, 16
- vpclmulqdq xmm1, xmm7, xmm10, 1
- vpclmulqdq xmm0, xmm7, xmm10, 0
- vpclmulqdq xmm3, xmm7, xmm10, 17
- vpxor xmm2, xmm2, xmm1
- vpxor xmm6, xmm6, xmm3
- vpxor xmm5, xmm5, xmm2
- vpxor xmm4, xmm4, xmm0
- vmovdqu xmm15, OWORD PTR [rsp+96]
- vmovdqu xmm7, OWORD PTR [rsp+112]
- vpclmulqdq xmm2, xmm15, xmm9, 16
- vpclmulqdq xmm1, xmm15, xmm9, 1
- vpclmulqdq xmm0, xmm15, xmm9, 0
- vpclmulqdq xmm3, xmm15, xmm9, 17
- vpxor xmm2, xmm2, xmm1
- vpxor xmm6, xmm6, xmm3
- vpxor xmm5, xmm5, xmm2
- vpxor xmm4, xmm4, xmm0
- vpclmulqdq xmm2, xmm7, xmm8, 16
- vpclmulqdq xmm1, xmm7, xmm8, 1
- vpclmulqdq xmm0, xmm7, xmm8, 0
- vpclmulqdq xmm3, xmm7, xmm8, 17
- vpxor xmm2, xmm2, xmm1
- vpxor xmm6, xmm6, xmm3
- vpxor xmm5, xmm5, xmm2
- vpxor xmm4, xmm4, xmm0
- vpslldq xmm7, xmm5, 8
- vpsrldq xmm5, xmm5, 8
- vpxor xmm4, xmm4, xmm7
- vpxor xmm6, xmm6, xmm5
- ; ghash_red
- vmovdqu xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpclmulqdq xmm0, xmm4, xmm2, 16
- vpshufd xmm1, xmm4, 78
- vpxor xmm1, xmm1, xmm0
- vpclmulqdq xmm0, xmm1, xmm2, 16
- vpshufd xmm1, xmm1, 78
- vpxor xmm1, xmm1, xmm0
- vpxor xmm6, xmm6, xmm1
- vmovdqu xmm5, OWORD PTR [rsp]
- vmovdqu xmm4, OWORD PTR [rsp+128]
- L_AES_GCM_encrypt_update_avx2_done_128:
- cmp edi, r9d
- je L_AES_GCM_encrypt_update_avx2_done_enc
- mov r15d, r9d
- and r15d, 4294967280
- cmp edi, r15d
- jge L_AES_GCM_encrypt_update_avx2_last_block_done
- ; aesenc_block
- vmovdqu xmm1, xmm4
- vpshufb xmm0, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_epi64
- vpaddd xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_one
- vpxor xmm0, xmm0, [rax]
- vmovdqu xmm2, OWORD PTR [rax+16]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rax+32]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rax+48]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rax+64]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rax+80]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rax+96]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rax+112]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rax+128]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm2, OWORD PTR [rax+144]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm4, xmm1
- cmp r8d, 11
- vmovdqu xmm1, OWORD PTR [rax+160]
- jl L_AES_GCM_encrypt_update_avx2_aesenc_block_last
- vaesenc xmm0, xmm0, xmm1
- vmovdqu xmm2, OWORD PTR [rax+176]
- vaesenc xmm0, xmm0, xmm2
- cmp r8d, 13
- vmovdqu xmm1, OWORD PTR [rax+192]
- jl L_AES_GCM_encrypt_update_avx2_aesenc_block_last
- vaesenc xmm0, xmm0, xmm1
- vmovdqu xmm2, OWORD PTR [rax+208]
- vaesenc xmm0, xmm0, xmm2
- vmovdqu xmm1, OWORD PTR [rax+224]
- L_AES_GCM_encrypt_update_avx2_aesenc_block_last:
- vaesenclast xmm0, xmm0, xmm1
- vmovdqu xmm1, OWORD PTR [r11+rdi]
- vpxor xmm0, xmm0, xmm1
- vmovdqu OWORD PTR [r10+rdi], xmm0
- vpshufb xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm6, xmm6, xmm0
- add edi, 16
- cmp edi, r15d
- jge L_AES_GCM_encrypt_update_avx2_last_block_ghash
- L_AES_GCM_encrypt_update_avx2_last_block_start:
- vmovdqu xmm12, OWORD PTR [r11+rdi]
- vpshufb xmm11, xmm4, OWORD PTR L_avx2_aes_gcm_bswap_epi64
- vpaddd xmm4, xmm4, OWORD PTR L_avx2_aes_gcm_one
- ; aesenc_gfmul_sb
- vpclmulqdq xmm2, xmm6, xmm5, 1
- vpclmulqdq xmm3, xmm6, xmm5, 16
- vpclmulqdq xmm1, xmm6, xmm5, 0
- vpclmulqdq xmm8, xmm6, xmm5, 17
- vpxor xmm11, xmm11, [rax]
- vaesenc xmm11, xmm11, [rax+16]
- vpxor xmm3, xmm3, xmm2
- vpslldq xmm2, xmm3, 8
- vpsrldq xmm3, xmm3, 8
- vaesenc xmm11, xmm11, [rax+32]
- vpxor xmm2, xmm2, xmm1
- vpclmulqdq xmm1, xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vaesenc xmm11, xmm11, [rax+48]
- vaesenc xmm11, xmm11, [rax+64]
- vaesenc xmm11, xmm11, [rax+80]
- vpshufd xmm2, xmm2, 78
- vpxor xmm2, xmm2, xmm1
- vpclmulqdq xmm1, xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vaesenc xmm11, xmm11, [rax+96]
- vaesenc xmm11, xmm11, [rax+112]
- vaesenc xmm11, xmm11, [rax+128]
- vpshufd xmm2, xmm2, 78
- vaesenc xmm11, xmm11, [rax+144]
- vpxor xmm8, xmm8, xmm3
- vpxor xmm2, xmm2, xmm8
- vmovdqu xmm0, OWORD PTR [rax+160]
- cmp r8d, 11
- jl L_AES_GCM_encrypt_update_avx2_aesenc_gfmul_sb_last
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm11, xmm11, [rax+176]
- vmovdqu xmm0, OWORD PTR [rax+192]
- cmp r8d, 13
- jl L_AES_GCM_encrypt_update_avx2_aesenc_gfmul_sb_last
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm11, xmm11, [rax+208]
- vmovdqu xmm0, OWORD PTR [rax+224]
- L_AES_GCM_encrypt_update_avx2_aesenc_gfmul_sb_last:
- vaesenclast xmm11, xmm11, xmm0
- vpxor xmm6, xmm2, xmm1
- vpxor xmm11, xmm11, xmm12
- vmovdqu OWORD PTR [r10+rdi], xmm11
- vpshufb xmm11, xmm11, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm6, xmm6, xmm11
- add edi, 16
- cmp edi, r15d
- jl L_AES_GCM_encrypt_update_avx2_last_block_start
- L_AES_GCM_encrypt_update_avx2_last_block_ghash:
- ; ghash_gfmul_red
- vpclmulqdq xmm10, xmm6, xmm5, 16
- vpclmulqdq xmm9, xmm6, xmm5, 1
- vpclmulqdq xmm8, xmm6, xmm5, 0
- vpxor xmm10, xmm10, xmm9
- vpslldq xmm9, xmm10, 8
- vpsrldq xmm10, xmm10, 8
- vpxor xmm9, xmm9, xmm8
- vpclmulqdq xmm6, xmm6, xmm5, 17
- vpclmulqdq xmm8, xmm9, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vpshufd xmm9, xmm9, 78
- vpxor xmm9, xmm9, xmm8
- vpclmulqdq xmm8, xmm9, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vpshufd xmm9, xmm9, 78
- vpxor xmm6, xmm6, xmm10
- vpxor xmm6, xmm6, xmm9
- vpxor xmm6, xmm6, xmm8
- L_AES_GCM_encrypt_update_avx2_last_block_done:
- L_AES_GCM_encrypt_update_avx2_done_enc:
- vmovdqu OWORD PTR [r12], xmm6
- vmovdqu OWORD PTR [r14], xmm4
- vzeroupper
- vmovdqu xmm6, OWORD PTR [rsp+152]
- vmovdqu xmm7, OWORD PTR [rsp+168]
- vmovdqu xmm8, OWORD PTR [rsp+184]
- vmovdqu xmm9, OWORD PTR [rsp+200]
- vmovdqu xmm10, OWORD PTR [rsp+216]
- vmovdqu xmm11, OWORD PTR [rsp+232]
- vmovdqu xmm12, OWORD PTR [rsp+248]
- vmovdqu xmm13, OWORD PTR [rsp+264]
- vmovdqu xmm14, OWORD PTR [rsp+280]
- vmovdqu xmm15, OWORD PTR [rsp+296]
- add rsp, 312
- pop rdi
- pop r15
- pop r14
- pop r13
- pop r12
- ret
- AES_GCM_encrypt_update_avx2 ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- ;-----------------------------------------------------------------------
- ; AES_GCM_encrypt_final_avx2
- ; Computes the final GCM authentication tag: folds the bit-length block
- ; into the running GHASH state, byte-swaps the result, XORs it with the
- ; encrypted initial counter block E_K(counter0), and stores r8d bytes.
- ; ABI: Microsoft x64 (MASM). Uses AVX2 + PCLMULQDQ.
- ; In:  rcx      = pointer to 16-byte running GHASH/tag state
- ;      rdx      = output buffer for the authentication tag
- ;      r8d      = tag length in bytes (16 takes the fast path)
- ;      r9       = a length in bytes (presumably ciphertext length --
- ;                 TODO confirm against the C caller)
- ;      stack arg 5 = second length in bytes (presumably AAD length --
- ;                 TODO confirm), stack arg 6 = pointer to hash key H,
- ;                 stack arg 7 = pointer to E_K(counter0) (16 bytes each)
- ; Out: tag written to [rdx]; no register return value.
- ; Saves/restores callee-saved r12, r13 and xmm6, xmm7 per the Win64 ABI.
- ;-----------------------------------------------------------------------
- AES_GCM_encrypt_final_avx2 PROC
- push r12
- push r13
- ; After the two pushes the stack args sit at rsp+56/+64/+72.
- mov eax, DWORD PTR [rsp+56]
- mov r10, QWORD PTR [rsp+64]
- mov r11, QWORD PTR [rsp+72]
- sub rsp, 48
- ; Spill callee-saved xmm6/xmm7 (required by the Microsoft x64 ABI).
- vmovdqu OWORD PTR [rsp+16], xmm6
- vmovdqu OWORD PTR [rsp+32], xmm7
- ; xmm4 = GHASH state, xmm5 = H, xmm6 = E_K(counter0).
- vmovdqu xmm4, OWORD PTR [rcx]
- vmovdqu xmm5, OWORD PTR [r10]
- vmovdqu xmm6, OWORD PTR [r11]
- ; Derive the working hash key: a 128-bit left shift of H by one bit
- ; (vpsllq/vpsrlq/vpslldq carry the bit across the qword boundary),
- ; conditionally XORed with the reduction constant when bit 127 was set
- ; (vpshufd 255 + vpsrad 31 broadcast that bit into an all-ones mask).
- ; The same sequence is labelled "Calculate H" elsewhere in this file.
- vpsrlq xmm1, xmm5, 63
- vpsllq xmm0, xmm5, 1
- vpslldq xmm1, xmm1, 8
- vpor xmm0, xmm0, xmm1
- vpshufd xmm5, xmm5, 255
- vpsrad xmm5, xmm5, 31
- vpand xmm5, xmm5, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpxor xmm5, xmm5, xmm0
- ; calc_tag
- ; Build the GCM length block: both lengths are converted from bytes to
- ; bits (shl 3) and packed as the two 64-bit lanes of xmm0, then XORed
- ; into the GHASH state for the final multiplication.
- shl r9, 3
- shl rax, 3
- vmovq xmm0, r9
- vmovq xmm1, rax
- vpunpcklqdq xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm4
- ; ghash_gfmul_red
- ; One GHASH step: 128x128 carry-less multiply of xmm0 by the hash key
- ; using four vpclmulqdq partial products (lo, mid, mid, hi), the middle
- ; terms folded across the 64-bit halves, then a two-stage reduction
- ; modulo the GCM polynomial via the L_avx2_aes_gcm_mod2_128 constant.
- vpclmulqdq xmm7, xmm0, xmm5, 16
- vpclmulqdq xmm3, xmm0, xmm5, 1
- vpclmulqdq xmm2, xmm0, xmm5, 0
- vpxor xmm7, xmm7, xmm3
- vpslldq xmm3, xmm7, 8
- vpsrldq xmm7, xmm7, 8
- vpxor xmm3, xmm3, xmm2
- vpclmulqdq xmm0, xmm0, xmm5, 17
- vpclmulqdq xmm2, xmm3, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vpshufd xmm3, xmm3, 78
- vpxor xmm3, xmm3, xmm2
- vpclmulqdq xmm2, xmm3, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vpshufd xmm3, xmm3, 78
- vpxor xmm0, xmm0, xmm7
- vpxor xmm0, xmm0, xmm3
- vpxor xmm0, xmm0, xmm2
- ; Convert the GHASH result to big-endian byte order and XOR with
- ; E_K(counter0) to form the final authentication tag.
- vpshufb xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm0, xmm0, xmm6
- ; store_tag
- ; Fast path stores all 16 bytes at once; otherwise the tag is spilled
- ; to the stack and r8d bytes are copied out one at a time.
- cmp r8d, 16
- je L_AES_GCM_encrypt_final_avx2_store_tag_16
- xor r12, r12
- vmovdqu OWORD PTR [rsp], xmm0
- L_AES_GCM_encrypt_final_avx2_store_tag_loop:
- movzx r13d, BYTE PTR [rsp+r12]
- mov BYTE PTR [rdx+r12], r13b
- inc r12d
- cmp r12d, r8d
- jne L_AES_GCM_encrypt_final_avx2_store_tag_loop
- jmp L_AES_GCM_encrypt_final_avx2_store_tag_done
- L_AES_GCM_encrypt_final_avx2_store_tag_16:
- vmovdqu OWORD PTR [rdx], xmm0
- L_AES_GCM_encrypt_final_avx2_store_tag_done:
- ; vzeroupper avoids AVX->SSE transition penalties in the caller, then
- ; restore callee-saved xmm registers and the stack frame.
- vzeroupper
- vmovdqu xmm6, OWORD PTR [rsp+16]
- vmovdqu xmm7, OWORD PTR [rsp+32]
- add rsp, 48
- pop r13
- pop r12
- ret
- AES_GCM_encrypt_final_avx2 ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- AES_GCM_decrypt_update_avx2 PROC
- push r13
- push r12
- push r14
- push r15
- push rdi
- mov rax, rcx
- mov r10, r8
- mov r8d, edx
- mov r11, r9
- mov r9d, DWORD PTR [rsp+80]
- mov r12, QWORD PTR [rsp+88]
- mov r14, QWORD PTR [rsp+96]
- mov r15, QWORD PTR [rsp+104]
- sub rsp, 328
- vmovdqu OWORD PTR [rsp+168], xmm6
- vmovdqu OWORD PTR [rsp+184], xmm7
- vmovdqu OWORD PTR [rsp+200], xmm8
- vmovdqu OWORD PTR [rsp+216], xmm9
- vmovdqu OWORD PTR [rsp+232], xmm10
- vmovdqu OWORD PTR [rsp+248], xmm11
- vmovdqu OWORD PTR [rsp+264], xmm12
- vmovdqu OWORD PTR [rsp+280], xmm13
- vmovdqu OWORD PTR [rsp+296], xmm14
- vmovdqu OWORD PTR [rsp+312], xmm15
- vmovdqu xmm6, OWORD PTR [r12]
- vmovdqu xmm5, OWORD PTR [r14]
- vmovdqu xmm4, OWORD PTR [r15]
- ; Calculate H
- vpsrlq xmm1, xmm5, 63
- vpsllq xmm0, xmm5, 1
- vpslldq xmm1, xmm1, 8
- vpor xmm0, xmm0, xmm1
- vpshufd xmm5, xmm5, 255
- vpsrad xmm5, xmm5, 31
- vpand xmm5, xmm5, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpxor xmm5, xmm5, xmm0
- xor edi, edi
- cmp r9d, 128
- mov r13d, r9d
- jl L_AES_GCM_decrypt_update_avx2_done_128
- and r13d, 4294967168
- vmovdqu OWORD PTR [rsp+128], xmm4
- vmovdqu OWORD PTR [rsp+144], xmm15
- vmovdqu xmm3, OWORD PTR L_avx2_aes_gcm_mod2_128
- ; H ^ 1 and H ^ 2
- vpclmulqdq xmm9, xmm5, xmm5, 0
- vpclmulqdq xmm10, xmm5, xmm5, 17
- vpclmulqdq xmm8, xmm9, xmm3, 16
- vpshufd xmm9, xmm9, 78
- vpxor xmm9, xmm9, xmm8
- vpclmulqdq xmm8, xmm9, xmm3, 16
- vpshufd xmm9, xmm9, 78
- vpxor xmm9, xmm9, xmm8
- vpxor xmm0, xmm10, xmm9
- vmovdqu OWORD PTR [rsp], xmm5
- vmovdqu OWORD PTR [rsp+16], xmm0
- ; H ^ 3 and H ^ 4
- vpclmulqdq xmm11, xmm0, xmm5, 16
- vpclmulqdq xmm10, xmm0, xmm5, 1
- vpclmulqdq xmm9, xmm0, xmm5, 0
- vpclmulqdq xmm12, xmm0, xmm5, 17
- vpclmulqdq xmm13, xmm0, xmm0, 0
- vpclmulqdq xmm14, xmm0, xmm0, 17
- vpxor xmm11, xmm11, xmm10
- vpslldq xmm10, xmm11, 8
- vpsrldq xmm11, xmm11, 8
- vpxor xmm10, xmm10, xmm9
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm10, xmm10, xmm9
- vpxor xmm13, xmm13, xmm8
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm12, xmm12, xmm11
- vpxor xmm13, xmm13, xmm8
- vpxor xmm10, xmm10, xmm12
- vpxor xmm2, xmm13, xmm14
- vpxor xmm1, xmm10, xmm9
- vmovdqu OWORD PTR [rsp+32], xmm1
- vmovdqu OWORD PTR [rsp+48], xmm2
- ; H ^ 5 and H ^ 6
- vpclmulqdq xmm11, xmm1, xmm0, 16
- vpclmulqdq xmm10, xmm1, xmm0, 1
- vpclmulqdq xmm9, xmm1, xmm0, 0
- vpclmulqdq xmm12, xmm1, xmm0, 17
- vpclmulqdq xmm13, xmm1, xmm1, 0
- vpclmulqdq xmm14, xmm1, xmm1, 17
- vpxor xmm11, xmm11, xmm10
- vpslldq xmm10, xmm11, 8
- vpsrldq xmm11, xmm11, 8
- vpxor xmm10, xmm10, xmm9
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm10, xmm10, xmm9
- vpxor xmm13, xmm13, xmm8
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm12, xmm12, xmm11
- vpxor xmm13, xmm13, xmm8
- vpxor xmm10, xmm10, xmm12
- vpxor xmm0, xmm13, xmm14
- vpxor xmm7, xmm10, xmm9
- vmovdqu OWORD PTR [rsp+64], xmm7
- vmovdqu OWORD PTR [rsp+80], xmm0
- ; H ^ 7 and H ^ 8
- vpclmulqdq xmm11, xmm2, xmm1, 16
- vpclmulqdq xmm10, xmm2, xmm1, 1
- vpclmulqdq xmm9, xmm2, xmm1, 0
- vpclmulqdq xmm12, xmm2, xmm1, 17
- vpclmulqdq xmm13, xmm2, xmm2, 0
- vpclmulqdq xmm14, xmm2, xmm2, 17
- vpxor xmm11, xmm11, xmm10
- vpslldq xmm10, xmm11, 8
- vpsrldq xmm11, xmm11, 8
- vpxor xmm10, xmm10, xmm9
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm10, xmm10, xmm9
- vpxor xmm13, xmm13, xmm8
- vpclmulqdq xmm9, xmm10, xmm3, 16
- vpclmulqdq xmm8, xmm13, xmm3, 16
- vpshufd xmm10, xmm10, 78
- vpshufd xmm13, xmm13, 78
- vpxor xmm12, xmm12, xmm11
- vpxor xmm13, xmm13, xmm8
- vpxor xmm10, xmm10, xmm12
- vpxor xmm0, xmm13, xmm14
- vpxor xmm7, xmm10, xmm9
- vmovdqu OWORD PTR [rsp+96], xmm7
- vmovdqu OWORD PTR [rsp+112], xmm0
- L_AES_GCM_decrypt_update_avx2_ghash_128:
- ; aesenc_128_ghash
- lea rcx, QWORD PTR [r11+rdi]
- lea rdx, QWORD PTR [r10+rdi]
- ; aesenc_ctr
- vmovdqu xmm0, OWORD PTR [rsp+128]
- vmovdqu xmm1, OWORD PTR L_avx2_aes_gcm_bswap_epi64
- vpaddd xmm9, xmm0, OWORD PTR L_avx2_aes_gcm_one
- vpshufb xmm8, xmm0, xmm1
- vpaddd xmm10, xmm0, OWORD PTR L_avx2_aes_gcm_two
- vpshufb xmm9, xmm9, xmm1
- vpaddd xmm11, xmm0, OWORD PTR L_avx2_aes_gcm_three
- vpshufb xmm10, xmm10, xmm1
- vpaddd xmm12, xmm0, OWORD PTR L_avx2_aes_gcm_four
- vpshufb xmm11, xmm11, xmm1
- vpaddd xmm13, xmm0, OWORD PTR L_avx2_aes_gcm_five
- vpshufb xmm12, xmm12, xmm1
- vpaddd xmm14, xmm0, OWORD PTR L_avx2_aes_gcm_six
- vpshufb xmm13, xmm13, xmm1
- vpaddd xmm15, xmm0, OWORD PTR L_avx2_aes_gcm_seven
- vpshufb xmm14, xmm14, xmm1
- vpaddd xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_eight
- vpshufb xmm15, xmm15, xmm1
- ; aesenc_xor
- vmovdqu xmm7, OWORD PTR [rax]
- vmovdqu OWORD PTR [rsp+128], xmm0
- vpxor xmm8, xmm8, xmm7
- vpxor xmm9, xmm9, xmm7
- vpxor xmm10, xmm10, xmm7
- vpxor xmm11, xmm11, xmm7
- vpxor xmm12, xmm12, xmm7
- vpxor xmm13, xmm13, xmm7
- vpxor xmm14, xmm14, xmm7
- vpxor xmm15, xmm15, xmm7
- ; aesenc_pclmul_1
- vmovdqu xmm1, OWORD PTR [rcx]
- vmovdqu xmm0, OWORD PTR [rax+16]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vmovdqu xmm2, OWORD PTR [rsp+112]
- vpxor xmm1, xmm1, xmm6
- vpclmulqdq xmm5, xmm1, xmm2, 16
- vpclmulqdq xmm3, xmm1, xmm2, 1
- vpclmulqdq xmm6, xmm1, xmm2, 0
- vpclmulqdq xmm7, xmm1, xmm2, 17
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_2
- vmovdqu xmm1, OWORD PTR [rcx+16]
- vmovdqu xmm0, OWORD PTR [rsp+96]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rax+32]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rcx+32]
- vmovdqu xmm0, OWORD PTR [rsp+80]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rax+48]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rcx+48]
- vmovdqu xmm0, OWORD PTR [rsp+64]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rax+64]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rcx+64]
- vmovdqu xmm0, OWORD PTR [rsp+48]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rax+80]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rcx+80]
- vmovdqu xmm0, OWORD PTR [rsp+32]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rax+96]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rcx+96]
- vmovdqu xmm0, OWORD PTR [rsp+16]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rax+112]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_n
- vmovdqu xmm1, OWORD PTR [rcx+112]
- vmovdqu xmm0, OWORD PTR [rsp]
- vpshufb xmm1, xmm1, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm5, xmm5, xmm2
- vpclmulqdq xmm2, xmm1, xmm0, 16
- vpxor xmm5, xmm5, xmm3
- vpclmulqdq xmm3, xmm1, xmm0, 1
- vpxor xmm6, xmm6, xmm4
- vpclmulqdq xmm4, xmm1, xmm0, 0
- vpclmulqdq xmm1, xmm1, xmm0, 17
- vmovdqu xmm0, OWORD PTR [rax+128]
- vpxor xmm7, xmm7, xmm1
- vaesenc xmm8, xmm8, xmm0
- vaesenc xmm9, xmm9, xmm0
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm11, xmm11, xmm0
- vaesenc xmm12, xmm12, xmm0
- vaesenc xmm13, xmm13, xmm0
- vaesenc xmm14, xmm14, xmm0
- vaesenc xmm15, xmm15, xmm0
- ; aesenc_pclmul_l
- vpxor xmm5, xmm5, xmm2
- vpxor xmm6, xmm6, xmm4
- vpxor xmm5, xmm5, xmm3
- vpslldq xmm1, xmm5, 8
- vpsrldq xmm5, xmm5, 8
- vmovdqu xmm4, OWORD PTR [rax+144]
- vmovdqu xmm0, OWORD PTR L_avx2_aes_gcm_mod2_128
- vaesenc xmm8, xmm8, xmm4
- vpxor xmm6, xmm6, xmm1
- vpxor xmm7, xmm7, xmm5
- vpclmulqdq xmm3, xmm6, xmm0, 16
- vaesenc xmm9, xmm9, xmm4
- vaesenc xmm10, xmm10, xmm4
- vaesenc xmm11, xmm11, xmm4
- vpshufd xmm6, xmm6, 78
- vpxor xmm6, xmm6, xmm3
- vpclmulqdq xmm3, xmm6, xmm0, 16
- vaesenc xmm12, xmm12, xmm4
- vaesenc xmm13, xmm13, xmm4
- vaesenc xmm14, xmm14, xmm4
- vpshufd xmm6, xmm6, 78
- vpxor xmm6, xmm6, xmm3
- vpxor xmm6, xmm6, xmm7
- vaesenc xmm15, xmm15, xmm4
- cmp r8d, 11
- vmovdqu xmm7, OWORD PTR [rax+160]
- jl L_AES_GCM_decrypt_update_avx2_aesenc_128_ghash_avx_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rax+176]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- cmp r8d, 13
- vmovdqu xmm7, OWORD PTR [rax+192]
- jl L_AES_GCM_decrypt_update_avx2_aesenc_128_ghash_avx_done
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rax+208]
- vaesenc xmm8, xmm8, xmm7
- vaesenc xmm9, xmm9, xmm7
- vaesenc xmm10, xmm10, xmm7
- vaesenc xmm11, xmm11, xmm7
- vaesenc xmm12, xmm12, xmm7
- vaesenc xmm13, xmm13, xmm7
- vaesenc xmm14, xmm14, xmm7
- vaesenc xmm15, xmm15, xmm7
- vmovdqu xmm7, OWORD PTR [rax+224]
- L_AES_GCM_decrypt_update_avx2_aesenc_128_ghash_avx_done:
- ; aesenc_last
- vaesenclast xmm8, xmm8, xmm7
- vaesenclast xmm9, xmm9, xmm7
- vaesenclast xmm10, xmm10, xmm7
- vaesenclast xmm11, xmm11, xmm7
- vmovdqu xmm0, OWORD PTR [rcx]
- vmovdqu xmm1, OWORD PTR [rcx+16]
- vmovdqu xmm2, OWORD PTR [rcx+32]
- vmovdqu xmm3, OWORD PTR [rcx+48]
- vpxor xmm8, xmm8, xmm0
- vpxor xmm9, xmm9, xmm1
- vpxor xmm10, xmm10, xmm2
- vpxor xmm11, xmm11, xmm3
- vmovdqu OWORD PTR [rdx], xmm8
- vmovdqu OWORD PTR [rdx+16], xmm9
- vmovdqu OWORD PTR [rdx+32], xmm10
- vmovdqu OWORD PTR [rdx+48], xmm11
- vaesenclast xmm12, xmm12, xmm7
- vaesenclast xmm13, xmm13, xmm7
- vaesenclast xmm14, xmm14, xmm7
- vaesenclast xmm15, xmm15, xmm7
- vmovdqu xmm0, OWORD PTR [rcx+64]
- vmovdqu xmm1, OWORD PTR [rcx+80]
- vmovdqu xmm2, OWORD PTR [rcx+96]
- vmovdqu xmm3, OWORD PTR [rcx+112]
- vpxor xmm12, xmm12, xmm0
- vpxor xmm13, xmm13, xmm1
- vpxor xmm14, xmm14, xmm2
- vpxor xmm15, xmm15, xmm3
- vmovdqu OWORD PTR [rdx+64], xmm12
- vmovdqu OWORD PTR [rdx+80], xmm13
- vmovdqu OWORD PTR [rdx+96], xmm14
- vmovdqu OWORD PTR [rdx+112], xmm15
- ; aesenc_128_ghash - end
- add edi, 128
- cmp edi, r13d
- jl L_AES_GCM_decrypt_update_avx2_ghash_128
- vmovdqu xmm5, OWORD PTR [rsp]
- vmovdqu xmm4, OWORD PTR [rsp+128]
- vmovdqu xmm15, OWORD PTR [rsp+144]
- L_AES_GCM_decrypt_update_avx2_done_128:
- cmp edi, r9d
- jge L_AES_GCM_decrypt_update_avx2_done_dec
- mov r13d, r9d
- and r13d, 4294967280
- cmp edi, r13d
- jge L_AES_GCM_decrypt_update_avx2_last_block_done
- L_AES_GCM_decrypt_update_avx2_last_block_start:
- vmovdqu xmm11, OWORD PTR [r11+rdi]
- vpshufb xmm10, xmm4, OWORD PTR L_avx2_aes_gcm_bswap_epi64
- vpshufb xmm12, xmm11, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpaddd xmm4, xmm4, OWORD PTR L_avx2_aes_gcm_one
- vpxor xmm12, xmm12, xmm6
- ; aesenc_gfmul_sb
- vpclmulqdq xmm2, xmm12, xmm5, 1
- vpclmulqdq xmm3, xmm12, xmm5, 16
- vpclmulqdq xmm1, xmm12, xmm5, 0
- vpclmulqdq xmm8, xmm12, xmm5, 17
- vpxor xmm10, xmm10, [rax]
- vaesenc xmm10, xmm10, [rax+16]
- vpxor xmm3, xmm3, xmm2
- vpslldq xmm2, xmm3, 8
- vpsrldq xmm3, xmm3, 8
- vaesenc xmm10, xmm10, [rax+32]
- vpxor xmm2, xmm2, xmm1
- vpclmulqdq xmm1, xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vaesenc xmm10, xmm10, [rax+48]
- vaesenc xmm10, xmm10, [rax+64]
- vaesenc xmm10, xmm10, [rax+80]
- vpshufd xmm2, xmm2, 78
- vpxor xmm2, xmm2, xmm1
- vpclmulqdq xmm1, xmm2, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vaesenc xmm10, xmm10, [rax+96]
- vaesenc xmm10, xmm10, [rax+112]
- vaesenc xmm10, xmm10, [rax+128]
- vpshufd xmm2, xmm2, 78
- vaesenc xmm10, xmm10, [rax+144]
- vpxor xmm8, xmm8, xmm3
- vpxor xmm2, xmm2, xmm8
- vmovdqu xmm0, OWORD PTR [rax+160]
- cmp r8d, 11
- jl L_AES_GCM_decrypt_update_avx2_aesenc_gfmul_sb_last
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm10, xmm10, [rax+176]
- vmovdqu xmm0, OWORD PTR [rax+192]
- cmp r8d, 13
- jl L_AES_GCM_decrypt_update_avx2_aesenc_gfmul_sb_last
- vaesenc xmm10, xmm10, xmm0
- vaesenc xmm10, xmm10, [rax+208]
- vmovdqu xmm0, OWORD PTR [rax+224]
- L_AES_GCM_decrypt_update_avx2_aesenc_gfmul_sb_last:
- vaesenclast xmm10, xmm10, xmm0
- vpxor xmm6, xmm2, xmm1
- vpxor xmm10, xmm10, xmm11
- vmovdqu OWORD PTR [r10+rdi], xmm10
- add edi, 16
- cmp edi, r13d
- jl L_AES_GCM_decrypt_update_avx2_last_block_start
- L_AES_GCM_decrypt_update_avx2_last_block_done:
- L_AES_GCM_decrypt_update_avx2_done_dec:
- vmovdqu OWORD PTR [r12], xmm6
- vmovdqu OWORD PTR [r15], xmm4
- vzeroupper
- vmovdqu xmm6, OWORD PTR [rsp+168]
- vmovdqu xmm7, OWORD PTR [rsp+184]
- vmovdqu xmm8, OWORD PTR [rsp+200]
- vmovdqu xmm9, OWORD PTR [rsp+216]
- vmovdqu xmm10, OWORD PTR [rsp+232]
- vmovdqu xmm11, OWORD PTR [rsp+248]
- vmovdqu xmm12, OWORD PTR [rsp+264]
- vmovdqu xmm13, OWORD PTR [rsp+280]
- vmovdqu xmm14, OWORD PTR [rsp+296]
- vmovdqu xmm15, OWORD PTR [rsp+312]
- add rsp, 328
- pop rdi
- pop r15
- pop r14
- pop r12
- pop r13
- ret
- AES_GCM_decrypt_update_avx2 ENDP
- _text ENDS
- _text SEGMENT READONLY PARA
- ;-----------------------------------------------------------------------
- ; AES_GCM_decrypt_final_avx2
- ; Computes the final GCM authentication tag from the running GHASH state
- ; and compares it against the caller-supplied tag, writing 1 (match) or
- ; 0 (mismatch) through the result pointer.
- ; ABI: Microsoft x64 (MASM).
- ; In:  rcx     = pointer to 16-byte running GHASH state X
- ;      rdx     = pointer to the received authentication tag to verify
- ;      r8d     = tag size in bytes (16 takes the SIMD fast path)
- ;      r9      = ciphertext length in bytes
- ;      stack 5 = AAD length in bytes            (loaded into eax)
- ;      stack 6 = pointer to hash key H          (loaded into r10)
- ;      stack 7 = pointer to E_K(Y0)             (loaded into r11)
- ;      stack 8 = pointer to int result          (loaded into r12)
- ;      NOTE(review): argument roles are inferred from how the registers
- ;      are used below -- confirm against the C prototype in the header.
- ; Out: DWORD [result ptr] = 1 if tags are equal, 0 otherwise
- ; Clobbers: rax, r9, r10, r11, r13, r14, xmm0-xmm5, flags;
- ;           xmm6/xmm7 are callee-saved on Win64 and are saved/restored.
- ;-----------------------------------------------------------------------
- AES_GCM_decrypt_final_avx2 PROC
- push r12
- push r13
- push r14
- ; Stack-arg offsets account for the 8-byte return address, the 32-byte
- ; shadow space, and the 24 bytes of pushes above (40 + 24 = 64).
- mov eax, DWORD PTR [rsp+64]
- mov r10, QWORD PTR [rsp+72]
- mov r11, QWORD PTR [rsp+80]
- mov r12, QWORD PTR [rsp+88]
- sub rsp, 48
- ; Save the Win64 callee-saved XMM registers this function uses.
- vmovdqu OWORD PTR [rsp+16], xmm6
- vmovdqu OWORD PTR [rsp+32], xmm7
- ; xmm4 = GHASH state X, xmm5 = H, xmm6 = E_K(Y0).
- vmovdqu xmm4, OWORD PTR [rcx]
- vmovdqu xmm5, OWORD PTR [r10]
- vmovdqu xmm6, OWORD PTR [r11]
- ; Double H in GF(2^128): 128-bit shift-left-by-one (carry moved across
- ; the qword boundary via vpsrlq/vpslldq/vpor) ...
- vpsrlq xmm1, xmm5, 63
- vpsllq xmm0, xmm5, 1
- vpslldq xmm1, xmm1, 8
- vpor xmm0, xmm0, xmm1
- ; ... then conditionally XOR in the reduction constant when H's top bit
- ; was set: vpshufd/vpsrad broadcast that bit into an all-ones/zero mask.
- vpshufd xmm5, xmm5, 255
- vpsrad xmm5, xmm5, 31
- vpand xmm5, xmm5, OWORD PTR L_avx2_aes_gcm_mod2_128
- vpxor xmm5, xmm5, xmm0
- ; calc_tag
- ; Build the GCM length block (ciphertext bits : AAD bits) and fold it
- ; into the GHASH state.
- shl r9, 3
- shl rax, 3
- vmovq xmm0, r9
- vmovq xmm1, rax
- vpunpcklqdq xmm0, xmm0, xmm1
- vpxor xmm0, xmm0, xmm4
- ; ghash_gfmul_red
- ; One carry-less 128x128 multiply of the folded state by the doubled H
- ; (four vpclmulqdq partial products), then two reduction rounds using
- ; L_avx2_aes_gcm_mod2_128; vpshufd ..,78 swaps the qword halves.
- vpclmulqdq xmm7, xmm0, xmm5, 16
- vpclmulqdq xmm3, xmm0, xmm5, 1
- vpclmulqdq xmm2, xmm0, xmm5, 0
- vpxor xmm7, xmm7, xmm3
- vpslldq xmm3, xmm7, 8
- vpsrldq xmm7, xmm7, 8
- vpxor xmm3, xmm3, xmm2
- vpclmulqdq xmm0, xmm0, xmm5, 17
- vpclmulqdq xmm2, xmm3, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vpshufd xmm3, xmm3, 78
- vpxor xmm3, xmm3, xmm2
- vpclmulqdq xmm2, xmm3, OWORD PTR L_avx2_aes_gcm_mod2_128, 16
- vpshufd xmm3, xmm3, 78
- vpxor xmm0, xmm0, xmm7
- vpxor xmm0, xmm0, xmm3
- vpxor xmm0, xmm0, xmm2
- ; Byte-swap to big-endian and XOR with E_K(Y0): xmm0 = computed tag.
- vpshufb xmm0, xmm0, OWORD PTR L_avx2_aes_gcm_bswap_mask
- vpxor xmm0, xmm0, xmm6
- ; cmp_tag
- cmp r8d, 16
- je L_AES_GCM_decrypt_final_avx2_cmp_tag_16
- ; Partial tag (< 16 bytes): compare byte-by-byte, OR-accumulating every
- ; XOR difference into r10b so the loop never exits early on a mismatch.
- xor r13, r13
- xor r10, r10
- vmovdqu OWORD PTR [rsp], xmm0
- L_AES_GCM_decrypt_final_avx2_cmp_tag_loop:
- movzx r14d, BYTE PTR [rsp+r13]
- xor r14b, BYTE PTR [rdx+r13]
- or r10b, r14b
- inc r13d
- cmp r13d, r8d
- jne L_AES_GCM_decrypt_final_avx2_cmp_tag_loop
- ; r10 == 0 iff every compared byte matched.
- cmp r10, 0
- sete r10b
- jmp L_AES_GCM_decrypt_final_avx2_cmp_tag_done
- L_AES_GCM_decrypt_final_avx2_cmp_tag_16:
- ; Full 16-byte tag: single SIMD equality compare.
- vmovdqu xmm1, OWORD PTR [rdx]
- vpcmpeqb xmm0, xmm0, xmm1
- vpmovmskb r13, xmm0
- ; r13d == 0xFFFF (all 16 bytes equal) => result 1, else result 0
- xor r10d, r10d
- cmp r13d, 65535
- sete r10b
- L_AES_GCM_decrypt_final_avx2_cmp_tag_done:
- mov DWORD PTR [r12], r10d
- vzeroupper
- ; Restore callee-saved XMM registers and tear down the frame.
- vmovdqu xmm6, OWORD PTR [rsp+16]
- vmovdqu xmm7, OWORD PTR [rsp+32]
- add rsp, 48
- pop r14
- pop r13
- pop r12
- ret
- AES_GCM_decrypt_final_avx2 ENDP
- _text ENDS
- ENDIF
- END
|